From bd770e2b81e84de92acc8a52672fb4b1d06ae32d Mon Sep 17 00:00:00 2001
From: Gustavo Diaz
Date: Tue, 23 Sep 2025 01:51:25 +0000
Subject: [PATCH] =?UTF-8?q?tests:=20Ready=20condition=20rename=20(ACK.Reso?=
 =?UTF-8?q?urceSynced=E2=86=92Ready,=20assert=5F*=5Fsynced=E2=86=92assert?=
 =?UTF-8?q?=5F*=5Fready)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 go.mod                                            |  2 ++
 go.sum                                            |  4 ++--
 test/e2e/requirements.txt                         |  2 +-
 test/e2e/tests/test_cache_cluster.py              |  4 ++--
 test/e2e/tests/test_replicationgroup.py           | 16 ++++++++--------
 test/e2e/tests/test_serverless_cache.py           | 12 ++++++------
 test/e2e/tests/test_serverless_cache_snapshot.py  |  4 ++--
 test/e2e/tests/test_snapshot.py                   |  2 +-
 test/e2e/tests/test_user.py                       |  6 +++---
 test/e2e/tests/test_usergroup.py                  |  4 ++--
 10 files changed, 29 insertions(+), 27 deletions(-)

diff --git a/go.mod b/go.mod
index 83263cda..e1d97053 100644
--- a/go.mod
+++ b/go.mod
@@ -91,3 +91,5 @@ require (
 	sigs.k8s.io/structured-merge-diff/v4 v4.4.2 // indirect
 	sigs.k8s.io/yaml v1.4.0 // indirect
 )
+
+replace github.com/aws-controllers-k8s/runtime => github.com/gustavodiaz7722/ack-runtime v0.51.0
diff --git a/go.sum b/go.sum
index 1159aa35..ee7a6f1c 100644
--- a/go.sum
+++ b/go.sum
@@ -2,8 +2,6 @@ github.com/aws-controllers-k8s/ec2-controller v1.0.7 h1:7MDu2bq8NFKbgzzgHYPFRT7b
 github.com/aws-controllers-k8s/ec2-controller v1.0.7/go.mod h1:PvsQehgncHgcu9FiY13M45+GkVsKI98g7G83SrgH7vY=
 github.com/aws-controllers-k8s/kms-controller v1.0.2 h1:v8nh/oaX/U6spCwBDaWyem7XXpzoP/MnkJyEjNOZN9s=
 github.com/aws-controllers-k8s/kms-controller v1.0.2/go.mod h1:BeoijsyGjJ9G5VcDjpFdxBW0IxaeKXYX497XmUJiPSQ=
-github.com/aws-controllers-k8s/runtime v0.52.0 h1:Q5UIAn6SSBr60t/DiU/zr6NLBlUuK2AG3yy2ma/9gDU=
-github.com/aws-controllers-k8s/runtime v0.52.0/go.mod h1:OkUJN+Ds799JLYZsMJrO2vDJ4snxUeHK2MgrQHbU+Qc=
 github.com/aws-controllers-k8s/sns-controller v1.0.11 h1:nnkywTHzO64y7RrrfoPNyYf1TOkkQHtlg+S0jEPKUZ8=
 github.com/aws-controllers-k8s/sns-controller v1.0.11/go.mod h1:ODQIDZR3hHQqcyif4UXVFQfEzTaWU1jqFtVr83K2p9M=
 github.com/aws/aws-sdk-go v1.49.0 h1:g9BkW1fo9GqKfwg2+zCD+TW/D36Ux+vtfJ8guF4AYmY=
@@ -90,6 +88,8 @@ github.com/google/pprof v0.0.0-20241029153458-d1b30febd7db h1:097atOisP2aRj7vFgY
 github.com/google/pprof v0.0.0-20241029153458-d1b30febd7db/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144=
 github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
 github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/gustavodiaz7722/ack-runtime v0.51.0 h1:dgf25lfg5r1kjJtHzdbNrMtQVloYn77WLYi1X41NXhw=
+github.com/gustavodiaz7722/ack-runtime v0.51.0/go.mod h1:OkUJN+Ds799JLYZsMJrO2vDJ4snxUeHK2MgrQHbU+Qc=
 github.com/itchyny/gojq v0.12.6 h1:VjaFn59Em2wTxDNGcrRkDK9ZHMNa8IksOgL13sLL4d0=
 github.com/itchyny/gojq v0.12.6/go.mod h1:ZHrkfu7A+RbZLy5J1/JKpS4poEqrzItSTGDItqsfP0A=
 github.com/itchyny/timefmt-go v0.1.3 h1:7M3LGVDsqcd0VZH2U+x393obrzZisp7C0uEe921iRkU=
diff --git a/test/e2e/requirements.txt b/test/e2e/requirements.txt
index e469fd1c..9b2728f0 100644
--- a/test/e2e/requirements.txt
+++ b/test/e2e/requirements.txt
@@ -1 +1 @@
-acktest @ git+https://github.com/aws-controllers-k8s/test-infra.git@5a09bbdb961ea14a65b15b63769134125023ac61
\ No newline at end of file
+acktest @ git+https://github.com/gustavodiaz7722/ack-test-infra.git@075991b980ffbc9177f0427270be707359240f89
diff --git a/test/e2e/tests/test_cache_cluster.py b/test/e2e/tests/test_cache_cluster.py
index 7057da1f..639e92db 100644
--- a/test/e2e/tests/test_cache_cluster.py
+++ b/test/e2e/tests/test_cache_cluster.py
@@ -70,9 +70,9 @@ def get_and_assert_status(ref: k8s.CustomResourceReference, expected_status: str
     assert cr['status']['cacheClusterStatus'] == expected_status
 
     if expected_synced:
-        condition.assert_synced(ref)
+        condition.assert_ready(ref)
     else:
-        condition.assert_not_synced(ref)
+        condition.assert_not_ready(ref)
 
 
 @pytest.fixture(scope="module")
diff --git a/test/e2e/tests/test_replicationgroup.py b/test/e2e/tests/test_replicationgroup.py
index 5667c28d..2d5c5bc9 100644
--- a/test/e2e/tests/test_replicationgroup.py
+++ b/test/e2e/tests/test_replicationgroup.py
@@ -171,7 +171,7 @@ class TestReplicationGroup:
     def test_rg_cmd_fromsnapshot(self, rg_cmd_fromsnapshot):
         (reference, _) = rg_cmd_fromsnapshot
         assert k8s.wait_on_condition(
-            reference, "ACK.ResourceSynced", "True", wait_periods=90)
+            reference, "Ready", "True", wait_periods=90)
 
     def test_rg_invalid_primary(self, make_rg_name, make_replication_group, rg_deletion_waiter):
         input_dict = {
@@ -196,7 +196,7 @@ def test_rg_invalid_primary(self, make_rg_name, make_replication_group, rg_delet
     def test_rg_update(self, rg_update_input, rg_update):
         (reference, _) = rg_update
         assert k8s.wait_on_condition(
-            reference, "ACK.ResourceSynced", "True", wait_periods=90)
+            reference, "Ready", "True", wait_periods=90)
 
         # desired initial state
         cr = k8s.get_resource(reference)
@@ -246,7 +246,7 @@ def test_rg_update(self, rg_update_input, rg_update):
         _ = k8s.patch_custom_resource(reference, patch)
         sleep(DEFAULT_WAIT_SECS)
         assert k8s.wait_on_condition(
-            reference, "ACK.ResourceSynced", "True", wait_periods=90)
+            reference, "Ready", "True", wait_periods=90)
 
         # Assert new state
         resource = k8s.get_resource(reference)
@@ -266,7 +266,7 @@ def test_rg_update(self, rg_update_input, rg_update):
         LONG_WAIT_SECS = 180
         sleep(LONG_WAIT_SECS)
         assert k8s.wait_on_condition(
-            reference, "ACK.ResourceSynced", "True", wait_periods=90)
+            reference, "Ready", "True", wait_periods=90)
 
         # assert new tags
         assert_spec_tags(rg_id, new_tags)
@@ -275,7 +275,7 @@ def test_rg_update(self, rg_update_input, rg_update):
     def test_rg_fault_tolerance(self, rg_fault_tolerance):
         (reference, _) = rg_fault_tolerance
         assert k8s.wait_on_condition(
-            reference, "ACK.ResourceSynced", "True", wait_periods=90)
+            reference, "Ready", "True", wait_periods=90)
 
         # assert initial state
         resource = k8s.get_resource(reference)
@@ -300,7 +300,7 @@ def test_rg_fault_tolerance(self, rg_fault_tolerance):
         _ = k8s.patch_custom_resource(reference, patch)
         sleep(DEFAULT_WAIT_SECS)
         assert k8s.wait_on_condition(
-            reference, "ACK.ResourceSynced", "True", wait_periods=90)
+            reference, "Ready", "True", wait_periods=90)
 
         # assert new state
         resource = k8s.get_resource(reference)
@@ -313,7 +313,7 @@ def test_rg_fault_tolerance(self, rg_fault_tolerance):
         _ = k8s.patch_custom_resource(reference, patch)
         sleep(DEFAULT_WAIT_SECS)
         assert k8s.wait_on_condition(
-            reference, "ACK.ResourceSynced", "True", wait_periods=90)
+            reference, "Ready", "True", wait_periods=90)
 
         # assert roles
         resource = k8s.get_resource(reference)
@@ -343,7 +343,7 @@ def test_rg_creation_deletion(self, make_rg_name, make_replication_group, rg_del
             "replicationgroup_create_delete", input_dict, input_dict["RG_ID"])
 
         assert k8s.wait_on_condition(
-            reference, "ACK.ResourceSynced", "True", wait_periods=90)
+            reference, "Ready", "True", wait_periods=90)
 
         # assertions after initial creation
         resource = k8s.get_resource(reference)
diff --git a/test/e2e/tests/test_serverless_cache.py b/test/e2e/tests/test_serverless_cache.py
index e25078fa..31d75398 100644
--- a/test/e2e/tests/test_serverless_cache.py
+++ b/test/e2e/tests/test_serverless_cache.py
@@ -58,9 +58,9 @@ def get_and_assert_status(ref: k8s.CustomResourceReference, expected_status: str
     assert cr['status']['status'] == expected_status
 
    if expected_synced:
-        condition.assert_synced(ref)
+        condition.assert_ready(ref)
     else:
-        condition.assert_not_synced(ref)
+        condition.assert_not_ready(ref)
 
 
 @pytest.fixture(scope="module")
@@ -129,7 +129,7 @@ def test_create_update_delete_serverless_cache(self, simple_serverless_cache, el
         (ref, _) = simple_serverless_cache
 
         assert k8s.wait_on_condition(
-            ref, "ACK.ResourceSynced", "True", wait_periods=90
+            ref, "Ready", "True", wait_periods=90
         )
 
         get_and_assert_status(ref, "available", True)
@@ -162,7 +162,7 @@ def test_create_update_delete_serverless_cache(self, simple_serverless_cache, el
 
         # Wait for update to be synced
         assert k8s.wait_on_condition(
-            ref, "ACK.ResourceSynced", "True", wait_periods=90
+            ref, "Ready", "True", wait_periods=90
         )
 
         # Verify the update was applied
@@ -178,7 +178,7 @@ def test_upgrade_redis_to_valkey(self, upgrade_serverless_cache, elasticache_cli
 
         # Wait for the serverless cache to be created and become available
         assert k8s.wait_on_condition(
-            ref, "ACK.ResourceSynced", "True", wait_periods=90
+            ref, "Ready", "True", wait_periods=90
         )
 
         get_and_assert_status(ref, "available", True)
@@ -206,7 +206,7 @@ def test_upgrade_redis_to_valkey(self, upgrade_serverless_cache, elasticache_cli
 
         # Wait for upgrade to be synced
         assert k8s.wait_on_condition(
-            ref, "ACK.ResourceSynced", "True", wait_periods=90
+            ref, "Ready", "True", wait_periods=90
         )
 
         # Wait for it to be available again after upgrade
diff --git a/test/e2e/tests/test_serverless_cache_snapshot.py b/test/e2e/tests/test_serverless_cache_snapshot.py
index c645d34f..e2dd9eb5 100644
--- a/test/e2e/tests/test_serverless_cache_snapshot.py
+++ b/test/e2e/tests/test_serverless_cache_snapshot.py
@@ -64,7 +64,7 @@ def serverless_cache_for_snapshot(elasticache_client):
 
     # Wait for serverless cache to be available
     assert k8s.wait_on_condition(
-        ref, "ACK.ResourceSynced", "True", wait_periods=90
+        ref, "Ready", "True", wait_periods=90
     )
 
     yield ref, cr
@@ -112,7 +112,7 @@ def test_create_delete_serverless_cache_snapshot(self, simple_serverless_cache_s
         (ref, _) = simple_serverless_cache_snapshot
 
         assert k8s.wait_on_condition(
-            ref, "ACK.ResourceSynced", "True", wait_periods=120
+            ref, "Ready", "True", wait_periods=120
         )
 
         tag_updates = {
diff --git a/test/e2e/tests/test_snapshot.py b/test/e2e/tests/test_snapshot.py
index 340733bd..92100247 100644
--- a/test/e2e/tests/test_snapshot.py
+++ b/test/e2e/tests/test_snapshot.py
@@ -82,4 +82,4 @@ class TestSnapshot:
     # test create of snapshot while providing KMS key
     def test_snapshot_kms(self, snapshot_kms):
         (reference, _) = snapshot_kms
-        assert k8s.wait_on_condition(reference, "ACK.ResourceSynced", "True", wait_periods=15)
+        assert k8s.wait_on_condition(reference, "Ready", "True", wait_periods=15)
diff --git a/test/e2e/tests/test_user.py b/test/e2e/tests/test_user.py
index b0f5a9e4..7af48db0 100644
--- a/test/e2e/tests/test_user.py
+++ b/test/e2e/tests/test_user.py
@@ -115,7 +115,7 @@ def test_user_nopass(self, user_nopass, user_nopass_input):
         (reference, resource) = user_nopass
         assert k8s.get_resource_exists(reference)
 
-        assert k8s.wait_on_condition(reference, "ACK.ResourceSynced", "True", wait_periods=5)
+        assert k8s.wait_on_condition(reference, "Ready", "True", wait_periods=5)
 
         resource = k8s.get_resource(reference)
         assert resource["status"]["lastRequestedAccessString"] == user_nopass_input["ACCESS_STRING"]
@@ -124,7 +124,7 @@ def test_user_nopass(self, user_nopass, user_nopass_input):
         _ = k8s.patch_custom_resource(reference, user_patch)
         sleep(DEFAULT_WAIT_SECS)
 
-        assert k8s.wait_on_condition(reference, "ACK.ResourceSynced", "True", wait_periods=5)
+        assert k8s.wait_on_condition(reference, "Ready", "True", wait_periods=5)
 
         resource = k8s.get_resource(reference)
         assert resource["status"]["lastRequestedAccessString"] == new_access_string
@@ -133,7 +133,7 @@ def test_user_password(self, user_password, user_password_input):
         (reference, resource) = user_password
         assert k8s.get_resource_exists(reference)
 
-        assert k8s.wait_on_condition(reference, "ACK.ResourceSynced", "True", wait_periods=5)
+        assert k8s.wait_on_condition(reference, "Ready", "True", wait_periods=5)
         resource = k8s.get_resource(reference)
         assert resource["status"]["authentication"] is not None
         assert resource["status"]["authentication"]["type_"] == "password"
diff --git a/test/e2e/tests/test_usergroup.py b/test/e2e/tests/test_usergroup.py
index 9c5bebec..3d4e8aa1 100644
--- a/test/e2e/tests/test_usergroup.py
+++ b/test/e2e/tests/test_usergroup.py
@@ -66,7 +66,7 @@ def user_group_create(get_user_group_yaml):
 class TestUserGroup:
     def test_user_group_create_update(self, user_group_create, get_user_group_yaml, bootstrap_resources):
         (reference, resource) = user_group_create
-        assert k8s.wait_on_condition(reference, "ACK.ResourceSynced", "True", wait_periods=15)
+        assert k8s.wait_on_condition(reference, "Ready", "True", wait_periods=15)
 
         # Update the usergroup to include one more user
         updated_user_group = get_user_group_yaml(reference.name)
@@ -74,7 +74,7 @@ def test_user_group_create_update(self, user_group_create, get_user_group_yaml,
 
         k8s.patch_custom_resource(reference, updated_user_group)
 
-        assert k8s.wait_on_condition(reference, "ACK.ResourceSynced", "True", wait_periods=15)
+        assert k8s.wait_on_condition(reference, "Ready", "True", wait_periods=15)
         resource = k8s.get_resource(reference)
         assert len(resource["spec"]["userIDs"]) == 2
         assert resource["status"]["status"] == "active"