[DPE-4820] Update unit tests after fixing GCP backup test #528

Merged · 17 commits · Jul 11, 2024

tests/unit/test_backups.py (29 additions & 2 deletions)
@@ -1101,7 +1101,15 @@ def test_on_s3_credential_changed(harness):
_can_initialise_stanza.assert_called_once()
_is_primary.assert_not_called()

# Test that followers will not initialise the bucket
# Test that followers will not initialise the bucket (and that only the leader will
# remove the "require-change-bucket-after-restore" flag from the application databag).
with harness.hooks_disabled():
harness.set_leader()
harness.update_relation_data(
peer_rel_id,
harness.charm.app.name,
{"require-change-bucket-after-restore": "True"},
)
harness.charm.unit.status = ActiveStatus()
_render_pgbackrest_conf_file.reset_mock()
_can_initialise_stanza.return_value = True
@@ -1116,14 +1124,20 @@ def test_on_s3_credential_changed(harness):
relation=harness.model.get_relation(S3_PARAMETERS_RELATION, s3_rel_id)
)
_render_pgbackrest_conf_file.assert_called_once()
tc.assertNotIn(
"require-change-bucket-after-restore",
harness.get_relation_data(peer_rel_id, harness.charm.app),
)
_is_primary.assert_called_once()
_create_bucket_if_not_exists.assert_not_called()
tc.assertIsInstance(harness.charm.unit.status, ActiveStatus)
_can_use_s3_repository.assert_not_called()
_initialise_stanza.assert_not_called()

# Test when the charm renders the pgBackRest configuration file, but fails to
# access or create the S3 bucket.
# access or create the S3 bucket (and assert that a non-leader unit won't
# remove the "require-change-bucket-after-restore" flag from the application
# databag).
_is_primary.return_value = True
for error in [
ClientError(
@@ -1132,13 +1146,26 @@ def test_on_s3_credential_changed(harness):
),
ValueError,
]:
with harness.hooks_disabled():
harness.set_leader(False)
harness.update_relation_data(
peer_rel_id,
harness.charm.app.name,
{"require-change-bucket-after-restore": "True"},
)
_render_pgbackrest_conf_file.reset_mock()
_create_bucket_if_not_exists.reset_mock()
_create_bucket_if_not_exists.side_effect = error
harness.charm.backup.s3_client.on.credentials_changed.emit(
relation=harness.model.get_relation(S3_PARAMETERS_RELATION, s3_rel_id)
)
_render_pgbackrest_conf_file.assert_called_once()
tc.assertEqual(
harness.get_relation_data(peer_rel_id, harness.charm.app)[
"require-change-bucket-after-restore"
],
"True",
)
_create_bucket_if_not_exists.assert_called_once()
tc.assertIsInstance(harness.charm.unit.status, BlockedStatus)
tc.assertEqual(
(remaining diff lines collapsed)
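
The hunks above rely on a standard ops.testing Harness pattern: seed the peer application databag with hooks disabled, set leadership, then assert on the databag after the event under test has fired. A minimal, self-contained sketch of that pattern; the DemoCharm class, relation name, and flag are placeholders, not code from this repository:

from ops.charm import CharmBase
from ops.testing import Harness


class DemoCharm(CharmBase):
    """Bare-bones charm; just enough state for a databag-seeding test."""


def test_databag_seeding_sketch():
    harness = Harness(
        DemoCharm,
        meta="name: demo\npeers:\n  cluster:\n    interface: cluster",
    )
    harness.begin()
    rel_id = harness.add_relation("cluster", "demo")

    # Seed the application databag without firing relation-changed hooks,
    # exactly as the tests above do before emitting the S3 event.
    with harness.hooks_disabled():
        harness.set_leader(True)
        harness.update_relation_data(rel_id, "demo", {"some-flag": "True"})

    # ...emit the event under test here; afterwards, assert on the databag.
    assert harness.get_relation_data(rel_id, "demo")["some-flag"] == "True"
    harness.cleanup()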

tests/unit/test_charm.py (31 additions & 1 deletion)
@@ -2362,14 +2362,44 @@ def test_update_new_unit_status(harness):
assert isinstance(harness.charm.unit.status, WaitingStatus)


def test_set_primary_status_message(harness):
@pytest.mark.parametrize("is_leader", [True, False])
def test_set_primary_status_message(harness, is_leader):
with (
patch("charm.Patroni.member_started", new_callable=PropertyMock) as _member_started,
patch(
"charm.PostgresqlOperatorCharm.is_standby_leader", new_callable=PropertyMock
) as _is_standby_leader,
patch("charm.Patroni.get_primary") as _get_primary,
):
# Test the scenario where the charm needs to move to another S3 bucket after a restore.
databag_containing_restore_data = {
"require-change-bucket-after-restore": "True",
"restoring-backup": "2024-01-01T09:00:00Z",
"restore-stanza": "fake-stanza",
"restore-to-time": "",
}
with harness.hooks_disabled():
harness.update_relation_data(
harness.model.get_relation(PEER).id,
harness.charm.app.name,
databag_containing_restore_data,
)
harness.set_leader(is_leader)
harness.charm._set_primary_status_message()
assert harness.get_relation_data(harness.model.get_relation(PEER).id, harness.charm.app.name) == (
{"require-change-bucket-after-restore": "True"}
if is_leader
else databag_containing_restore_data
)
tc.assertIsInstance(harness.charm.unit.status, BlockedStatus)

# Test other scenarios.
with harness.hooks_disabled():
harness.update_relation_data(
harness.model.get_relation(PEER).id,
harness.charm.app.name,
{"require-change-bucket-after-restore": ""},
)
for values in itertools.product(
[
RetryError(last_attempt=1),
(remaining diff lines collapsed)

tests/unit/test_cluster.py (75 additions & 2 deletions)
@@ -10,7 +10,7 @@
from charms.operator_libs_linux.v2 import snap
from jinja2 import Template
from ops.testing import Harness
from tenacity import stop_after_delay
from tenacity import RetryError, stop_after_delay, wait_fixed

from charm import PostgresqlOperatorCharm
from cluster import Patroni
@@ -429,7 +429,9 @@ def test_switchover(peers_ips, patroni):


def test_update_synchronous_node_count(peers_ips, patroni):
with patch("requests.patch") as _patch:
with patch("cluster.stop_after_delay", return_value=stop_after_delay(0)) as _wait_fixed, patch(
"cluster.wait_fixed", return_value=wait_fixed(0)
) as _wait_fixed, patch("requests.patch") as _patch:
response = _patch.return_value
response.status_code = 200

@@ -439,6 +441,11 @@ def test_update_synchronous_node_count(peers_ips, patroni):
"http://1.1.1.1:8008/config", json={"synchronous_node_count": 0}, verify=True
)

# Test when the request fails.
response.status_code = 500
with tc.assertRaises(RetryError):
patroni.update_synchronous_node_count()
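
The failure branch above works by patching the tenacity strategies that cluster.py imports (stop_after_delay and wait_fixed) so the retry loop gives up immediately instead of sleeping through its real budget. A self-contained sketch of the same technique, with a placeholder change_config function standing in for Patroni.update_synchronous_node_count (the retry budget and module layout here are assumptions):

from unittest.mock import patch

import pytest
import requests
from tenacity import RetryError, Retrying, stop_after_delay, wait_fixed


def change_config(url: str, payload: dict) -> None:
    """Retry the PATCH for up to 60 s, pausing 3 s between attempts."""
    for attempt in Retrying(stop=stop_after_delay(60), wait=wait_fixed(3)):
        with attempt:
            response = requests.patch(url, json=payload)
            if response.status_code != 200:
                raise ValueError(f"unexpected status {response.status_code}")


def test_change_config_gives_up_immediately():
    # Patch the names as imported by *this* module so the retry budget drops
    # to zero; otherwise the test would sleep through the real 60 s budget.
    with (
        patch(f"{__name__}.stop_after_delay", return_value=stop_after_delay(0)),
        patch(f"{__name__}.wait_fixed", return_value=wait_fixed(0)),
        patch("requests.patch") as _patch,
    ):
        _patch.return_value.status_code = 500
        with pytest.raises(RetryError):
            change_config("http://1.1.1.1:8008/config", {"synchronous_node_count": 0})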


def test_configure_patroni_on_unit(peers_ips, patroni):
with (
@@ -542,3 +549,69 @@ def test_member_inactive_error(peers_ips, patroni):
assert patroni.member_inactive

_get.assert_called_once_with("http://1.1.1.1:8008/health", verify=True, timeout=5)


def test_patroni_logs(patroni):
with patch("charm.snap.SnapCache") as _snap_cache:
# Test when the logs are returned successfully.
logs = _snap_cache.return_value.__getitem__.return_value.logs
logs.return_value = "fake-logs"
assert patroni.patroni_logs() == "fake-logs"

# Test when the charm fails to get the logs.
logs.side_effect = snap.SnapError
assert patroni.patroni_logs() == ""


def test_last_postgresql_logs(patroni):
with patch("glob.glob") as _glob, patch(
"builtins.open", mock_open(read_data="fake-logs")
) as _open:
# Test when there are no files to read.
assert patroni.last_postgresql_logs() == ""
_open.assert_not_called()

# Test when there are multiple files in the logs directory.
_glob.return_value = [
"/var/snap/charmed-postgresql/common/var/log/postgresql/postgresql.log.1",
"/var/snap/charmed-postgresql/common/var/log/postgresql/postgresql.log.2",
"/var/snap/charmed-postgresql/common/var/log/postgresql/postgresql.log.3",
]
assert patroni.last_postgresql_logs() == "fake-logs"
_open.assert_called_once_with(
"/var/snap/charmed-postgresql/common/var/log/postgresql/postgresql.log.3", "r"
)

# Test when the charm fails to read the logs.
_open.reset_mock()
_open.side_effect = OSError
assert patroni.last_postgresql_logs() == ""
_open.assert_called_with(
"/var/snap/charmed-postgresql/common/var/log/postgresql/postgresql.log.3", "r"
)
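
The new log-reading tests combine two standard mocking patterns: patch glob.glob to control the directory listing and patch builtins.open with mock_open to control file contents. A compact sketch under those assumptions; the last_log helper and paths below are placeholders, not the charm's implementation:

import glob
from unittest.mock import mock_open, patch


def last_log(pattern: str = "/var/log/demo/*.log*") -> str:
    """Return the contents of the newest (lexicographically last) log file."""
    files = glob.glob(pattern)
    if not files:
        return ""
    with open(sorted(files)[-1], "r") as file:
        return file.read()


def test_last_log_sketch():
    with (
        patch("glob.glob") as _glob,
        patch("builtins.open", mock_open(read_data="fake-logs")) as _open,
    ):
        # No files: nothing is opened.
        _glob.return_value = []
        assert last_log() == ""
        _open.assert_not_called()

        # Several rotated files: only the last one is read.
        _glob.return_value = ["/var/log/demo/a.log.1", "/var/log/demo/a.log.2"]
        assert last_log() == "fake-logs"
        _open.assert_called_once_with("/var/log/demo/a.log.2", "r")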


def test_get_patroni_restart_condition(patroni):
mock = mock_open()
with patch("builtins.open", mock) as _open:
# Test when there is a restart condition set.
_open.return_value.__enter__.return_value.read.return_value = "Restart=always"
assert patroni.get_patroni_restart_condition() == "always"

# Test when there is no restart condition set.
_open.return_value.__enter__.return_value.read.return_value = ""
with tc.assertRaises(RuntimeError):
patroni.get_patroni_restart_condition()


@pytest.mark.parametrize("new_restart_condition", ["on-success", "on-failure"])
def test_update_patroni_restart_condition(patroni, new_restart_condition):
with patch("builtins.open", mock_open(read_data="Restart=always")) as _open, patch(
"subprocess.run"
) as _run:
_open.return_value.__enter__.return_value.read.return_value = "Restart=always"
patroni.update_patroni_restart_condition(new_restart_condition)
_open.return_value.__enter__.return_value.write.assert_called_once_with(
f"Restart={new_restart_condition}"
)
_run.assert_called_once_with(["/bin/systemctl", "daemon-reload"])