diff --git a/pytest_tests/lib/helpers/aws_cli_client.py b/pytest_tests/lib/helpers/aws_cli_client.py
index 6e9694a83..9c4ecada0 100644
--- a/pytest_tests/lib/helpers/aws_cli_client.py
+++ b/pytest_tests/lib/helpers/aws_cli_client.py
@@ -100,11 +100,14 @@ def list_objects_v2(
         MaxItems: Optional[int] = None,
         PageSize: Optional[int] = None,
         StartingToken: Optional[str] = None,
+        Prefix: Optional[str] = None,
     ) -> dict:
         common_flags = self.common_flags
         if MaxKeys or MaxItems or PageSize or StartingToken:
             common_flags = common_flags.replace("--no-paginate", "")
         cmd = f"{aws_binary_path} {common_flags} s3api list-objects-v2 --bucket {Bucket} --endpoint {self.s3gate_endpoint}"
+        if Prefix:
+            cmd += f" --prefix {Prefix}"
 
         if MaxKeys:
             cmd += f" --max-keys {MaxKeys}"
diff --git a/pytest_tests/lib/s3/s3_object.py b/pytest_tests/lib/s3/s3_object.py
index f9a42a0e6..99e7a82a1 100644
--- a/pytest_tests/lib/s3/s3_object.py
+++ b/pytest_tests/lib/s3/s3_object.py
@@ -29,9 +29,17 @@
 
 
 @allure.step("List objects S3 v2")
-def list_objects_s3_v2(s3_client, bucket: str, full_output: bool = False) -> list:
+def list_objects_s3_v2(
+    s3_client, bucket: str, full_output: bool = False, max_keys: int = None, prefix: str = None
+) -> list:
     try:
-        response = s3_client.list_objects_v2(Bucket=bucket)
+        # botocore rejects explicit None values, so pass the optional filters only when they are set
+        params = {"Bucket": bucket}
+        if max_keys is not None:
+            params["MaxKeys"] = max_keys
+        if prefix is not None:
+            params["Prefix"] = prefix
+        response = s3_client.list_objects_v2(**params)
         content = response.get("Contents", [])
         log_command_execution("S3 v2 List objects result", response)
         obj_list = []
diff --git a/pytest_tests/tests/s3/test_s3_object.py b/pytest_tests/tests/s3/test_s3_object.py
index f13dd5add..b73865273 100644
--- a/pytest_tests/tests/s3/test_s3_object.py
+++ b/pytest_tests/tests/s3/test_s3_object.py
@@ -1,3 +1,4 @@
+import logging
 import os
 import string
 import time
@@ -27,6 +28,8 @@
 from s3 import s3_bucket, s3_object
 from s3.s3_base import TestNeofsS3Base
 
+logger = logging.getLogger("NeoLogger")
+
 
 def pytest_generate_tests(metafunc):
     parametrize_clients(metafunc)
@@ -951,3 +954,34 @@ def test_s3_internal_slicer(self, bucket):
         self.neofs_env.s3_gw.stop()
         self.neofs_env.s3_gw.internal_slicer = False
         self.neofs_env.s3_gw.start(fresh=False)
+
+    @pytest.mark.aws_cli_only
+    @pytest.mark.simple
+    def test_s3_large_object_listings(self):
+        bucket = s3_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
+
+        file_path = generate_file(16)
+        prefix_a = "/patha/"
+        prefix_b = "/pathb/"
+
+        for idx in range(1001):
+            s3_object.put_object_s3(self.s3_client, bucket, file_path, f"{prefix_a}file_{idx}")
+
+        for idx in range(1001):
+            s3_object.put_object_s3(self.s3_client, bucket, file_path, f"{prefix_b}file_{idx}")
+
+        list_obj = s3_object.list_objects_s3_v2(self.s3_client, bucket, max_keys=2002, prefix="/")
+        assert len(list_obj) == 2002, f"invalid number of objects: expected 2002, got {len(list_obj)}"
+
+        unique_objects = set(list_obj)
+        if len(unique_objects) != len(list_obj):
+            duplicates = [obj for obj in list_obj if list_obj.count(obj) > 1]
+            unique_duplicates = list(set(duplicates))
+            logger.info(f"Found {len(list_obj) - len(unique_objects)} duplicate objects in listing:")
+            for duplicate in unique_duplicates:
+                count = list_obj.count(duplicate)
+                logger.info(f" '{duplicate}' appears {count} times")
+
+        assert len(unique_objects) == len(list_obj), (
+            f"there are duplicates in objects listing output: {unique_duplicates}"
+        )
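
Note: the new test is marked aws_cli_only, so the prefix/max-keys path above is exercised only through the AwsCliClient wrapper and its paginated `aws s3api list-objects-v2` calls. For reference, a minimal boto3-only sketch of the same duplicate check is shown below; the endpoint URL and bucket name are placeholders for illustration, not part of this change:

    import boto3

    # Placeholder endpoint for a local S3 gateway; credentials are assumed to be
    # configured in the environment.
    client = boto3.client("s3", endpoint_url="http://127.0.0.1:8080")

    # list_objects_v2 returns at most 1000 keys per call, so walk all pages
    # for the given prefix with the paginator and collect every key.
    paginator = client.get_paginator("list_objects_v2")
    keys = []
    for page in paginator.paginate(Bucket="test-bucket", Prefix="/", PaginationConfig={"PageSize": 1000}):
        keys.extend(obj["Key"] for obj in page.get("Contents", []))

    # A correct gateway must not repeat keys across pages.
    assert len(keys) == len(set(keys)), "listing returned duplicate keys across pages"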