diff --git a/.evergreen/config_generator/components/abi_compliance_check.py b/.evergreen/config_generator/components/abi_compliance_check.py index a1cf44c808a..6c53ea0e25e 100644 --- a/.evergreen/config_generator/components/abi_compliance_check.py +++ b/.evergreen/config_generator/components/abi_compliance_check.py @@ -1,9 +1,7 @@ -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import s3_put +from shrub.v3.evg_command import EvgCommandType, s3_put from shrub.v3.evg_task import EvgTask from config_generator.components.funcs.install_uv import InstallUV - from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec diff --git a/.evergreen/config_generator/components/c_std_compile.py b/.evergreen/config_generator/components/c_std_compile.py index 232965628c9..d70aee0c361 100644 --- a/.evergreen/config_generator/components/c_std_compile.py +++ b/.evergreen/config_generator/components/c_std_compile.py @@ -3,14 +3,10 @@ from shrub.v3.evg_task import EvgTask, EvgTaskRef from config_generator.components.funcs.install_uv import InstallUV - -from config_generator.etc.distros import find_large_distro -from config_generator.etc.distros import make_distro_str -from config_generator.etc.distros import compiler_to_vars +from config_generator.etc.distros import compiler_to_vars, find_large_distro, make_distro_str from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec - TAG = 'std-matrix' @@ -95,10 +91,7 @@ def tasks(): name=task_name, run_on=distro.name, tags=tags + [f'std-c{std}'], - commands=[ - InstallUV.call(), - StdCompile.call(vars=compile_vars | with_std) - ], + commands=[InstallUV.call(), StdCompile.call(vars=compile_vars | with_std)], ) ) diff --git a/.evergreen/config_generator/components/clang_format.py b/.evergreen/config_generator/components/clang_format.py index 2fd9778e537..04e8833a9b1 100644 --- a/.evergreen/config_generator/components/clang_format.py +++ 
b/.evergreen/config_generator/components/clang_format.py @@ -1,15 +1,13 @@ from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_task import EvgTask -from shrub.v3.evg_task import EvgTaskRef +from shrub.v3.evg_task import EvgTask, EvgTaskRef from config_generator.components.funcs.install_uv import InstallUV from config_generator.etc.distros import find_small_distro from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec - -TAG = "clang-format" +TAG = 'clang-format' class ClangFormat(Function): @@ -17,9 +15,9 @@ class ClangFormat(Function): commands = [ bash_exec( command_type=EvgCommandType.TEST, - working_dir="mongoc", + working_dir='mongoc', env={ - "DRYRUN": "1", + 'DRYRUN': '1', }, script='PATH="${UV_INSTALL_DIR}:$PATH" uv run --frozen --only-group=format tools/format.py --mode=check', ), @@ -46,7 +44,7 @@ def variants(): BuildVariant( name=TAG, display_name=TAG, - run_on=[find_small_distro("ubuntu2204").name], - tasks=[EvgTaskRef(name=f".{TAG}")], + run_on=[find_small_distro('ubuntu2204').name], + tasks=[EvgTaskRef(name=f'.{TAG}')], ), ] diff --git a/.evergreen/config_generator/components/cse/darwinssl.py b/.evergreen/config_generator/components/cse/darwinssl.py index a3819b4e858..c593e942eb5 100644 --- a/.evergreen/config_generator/components/cse/darwinssl.py +++ b/.evergreen/config_generator/components/cse/darwinssl.py @@ -2,11 +2,9 @@ from shrub.v3.evg_task import EvgTaskRef from config_generator.etc.compile import generate_compile_tasks - from config_generator.etc.cse.compile import CompileCommon from config_generator.etc.cse.test import generate_test_tasks - SSL = 'darwinssl' TAG = f'cse-matrix-{SSL}' @@ -52,9 +50,7 @@ def tasks(): MORE_TAGS = ['cse'] - res += generate_compile_tasks( - SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS - ) + res += generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS) res += 
generate_test_tasks(SSL, TAG, TEST_MATRIX) diff --git a/.evergreen/config_generator/components/cse/openssl.py b/.evergreen/config_generator/components/cse/openssl.py index 8dbeb8176c9..8e33bd6b793 100644 --- a/.evergreen/config_generator/components/cse/openssl.py +++ b/.evergreen/config_generator/components/cse/openssl.py @@ -1,12 +1,10 @@ from shrub.v3.evg_build_variant import BuildVariant from config_generator.etc.compile import generate_compile_tasks -from config_generator.etc.function import merge_defns -from config_generator.etc.utils import TaskRef - from config_generator.etc.cse.compile import CompileCommon from config_generator.etc.cse.test import generate_test_tasks - +from config_generator.etc.function import merge_defns +from config_generator.etc.utils import TaskRef SSL = 'openssl' TAG = f'cse-matrix-{SSL}' @@ -83,7 +81,7 @@ def tasks(): # PowerPC and zSeries are limited resources. for task in res: - if any(pattern in task.run_on for pattern in ["power", "zseries"]): + if any(pattern in task.run_on for pattern in ['power', 'zseries']): task.patchable = False return res @@ -98,11 +96,11 @@ def variants(): # PowerPC and zSeries are limited resources. 
for task in TASKS: - if any(pattern in task.run_on for pattern in ["power", "zseries"]): + if any(pattern in task.run_on for pattern in ['power', 'zseries']): tasks.append( TaskRef( name=task.name, - batchtime=1440, # 1 day + batchtime=1440, # 1 day ) ) else: diff --git a/.evergreen/config_generator/components/cse/winssl.py b/.evergreen/config_generator/components/cse/winssl.py index f9281975260..995a268720a 100644 --- a/.evergreen/config_generator/components/cse/winssl.py +++ b/.evergreen/config_generator/components/cse/winssl.py @@ -2,11 +2,9 @@ from shrub.v3.evg_task import EvgTaskRef from config_generator.etc.compile import generate_compile_tasks - from config_generator.etc.cse.compile import CompileCommon from config_generator.etc.cse.test import generate_test_tasks - SSL = 'winssl' TAG = f'cse-matrix-{SSL}' @@ -50,9 +48,7 @@ def tasks(): MORE_TAGS = ['cse'] - res += generate_compile_tasks( - SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS - ) + res += generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS) res += generate_test_tasks(SSL, TAG, TEST_MATRIX) diff --git a/.evergreen/config_generator/components/earthly.py b/.evergreen/config_generator/components/earthly.py index 683be59728c..f38aba8cf74 100644 --- a/.evergreen/config_generator/components/earthly.py +++ b/.evergreen/config_generator/components/earthly.py @@ -19,62 +19,62 @@ from ..etc.utils import all_possible -T = TypeVar("T") +T = TypeVar('T') -_ENV_PARAM_NAME = "MONGOC_EARTHLY_ENV" -_CC_PARAM_NAME = "MONGOC_EARTHLY_C_COMPILER" -"The name of the EVG expansion for the Earthly c_compiler argument" +_ENV_PARAM_NAME = 'MONGOC_EARTHLY_ENV' +_CC_PARAM_NAME = 'MONGOC_EARTHLY_C_COMPILER' +'The name of the EVG expansion for the Earthly c_compiler argument' EnvKey = Literal[ - "u20", - "u22", - "almalinux8", - "almalinux9", - "almalinux10", - "alpine3.19", - "alpine3.20", - "alpine3.21", - "alpine3.22", - "archlinux", - "centos9", - "centos10", + 'u20', + 'u22', + 'almalinux8', + 
'almalinux9', + 'almalinux10', + 'alpine3.19', + 'alpine3.20', + 'alpine3.21', + 'alpine3.22', + 'archlinux', + 'centos9', + 'centos10', ] "Identifiers for environments. These correspond to special 'env.*' targets in the Earthfile." -CompilerName = Literal["gcc", "clang"] -"The name of the compiler program that is used for the build. Passed via --c_compiler to Earthly." +CompilerName = Literal['gcc', 'clang'] +'The name of the compiler program that is used for the build. Passed via --c_compiler to Earthly.' # Other options: SSPI (Windows only), AUTO (not reliably test-able without more environments) -SASLOption = Literal["Cyrus", "off"] -"Valid options for the SASL configuration parameter" -TLSOption = Literal["OpenSSL", "off"] +SASLOption = Literal['Cyrus', 'off'] +'Valid options for the SASL configuration parameter' +TLSOption = Literal['OpenSSL', 'off'] "Options for the TLS backend configuration parameter (AKA 'ENABLE_SSL')" -CxxVersion = Literal["master", "r4.1.0", "none"] -"C++ driver refs that are under CI test" +CxxVersion = Literal['master', 'r4.1.0', 'none'] +'C++ driver refs that are under CI test' # A separator character, since we cannot use whitespace -_SEPARATOR = "\N{NO-BREAK SPACE}\N{BULLET}\N{NO-BREAK SPACE}" +_SEPARATOR = '\N{NO-BREAK SPACE}\N{BULLET}\N{NO-BREAK SPACE}' def os_split(env: EnvKey) -> tuple[str, None | str]: """Convert the environment key into a pretty name+version pair""" match env: # Match 'alpine3.18' 'alpine53.123' etc. - case alp if mat := re.match(r"alpine(\d+\.\d+)", alp): - return ("Alpine", mat[1]) - case "archlinux": - return "ArchLinux", None + case alp if mat := re.match(r'alpine(\d+\.\d+)', alp): + return ('Alpine', mat[1]) + case 'archlinux': + return 'ArchLinux', None # Match 'u22', 'u20', 'u71' etc. - case ubu if mat := re.match(r"u(\d\d)", ubu): - return "Ubuntu", f"{mat[1]}.04" + case ubu if mat := re.match(r'u(\d\d)', ubu): + return 'Ubuntu', f'{mat[1]}.04' # Match 'centos9', 'centos10', etc. 
- case cent if mat := re.match(r"centos(\d+)", cent): - return "CentOS", f"{mat[1]}" + case cent if mat := re.match(r'centos(\d+)', cent): + return 'CentOS', f'{mat[1]}' # Match 'almalinux8', 'almalinux10', etc. - case alm if mat := re.match(r"almalinux(\d+)", alm): - return "AlmaLinux", f"{mat[1]}" + case alm if mat := re.match(r'almalinux(\d+)', alm): + return 'AlmaLinux', f'{mat[1]}' case _: - raise ValueError(f"Failed to split OS env key {env=} into a name+version pair (unrecognized)") + raise ValueError(f'Failed to split OS env key {env=} into a name+version pair (unrecognized)') class EarthlyVariant(NamedTuple): @@ -95,14 +95,14 @@ def display_name(self) -> str: case name, None: base = name case name, version: - base = f"{name} {version}" + base = f'{name} {version}' toolchain: str match self.c_compiler: - case "clang": - toolchain = "LLVM/Clang" - case "gcc": - toolchain = "GCC" - return f"{base} ({toolchain})" + case 'clang': + toolchain = 'LLVM/Clang' + case 'gcc': + toolchain = 'GCC' + return f'{base} ({toolchain})' @property def task_selector_tag(self) -> str: @@ -110,7 +110,7 @@ def task_selector_tag(self) -> str: The task tag that is used to select the tasks that want to run on this variant. 
""" - return f"{self.env}-{self.c_compiler}" + return f'{self.env}-{self.c_compiler}' @property def expansions(self) -> Mapping[str, str]: @@ -125,8 +125,8 @@ def expansions(self) -> Mapping[str, str]: def as_evg_variant(self) -> BuildVariant: return BuildVariant( - name=f"{self.task_selector_tag}", - tasks=[EvgTaskRef(name=f".{self.task_selector_tag}")], + name=f'{self.task_selector_tag}', + tasks=[EvgTaskRef(name=f'.{self.task_selector_tag}')], display_name=self.display_name, expansions=dict(self.expansions), ) @@ -148,28 +148,28 @@ class Configuration(NamedTuple): @property def suffix(self) -> str: - return _SEPARATOR.join(f"{k}={v}" for k, v in self._asdict().items()) + return _SEPARATOR.join(f'{k}={v}' for k, v in self._asdict().items()) # Authenticate with DevProd-provided Amazon ECR instance to use as pull-through cache for DockerHub. class DockerLoginAmazonECR(Function): - name = "docker-login-amazon-ecr" + name = 'docker-login-amazon-ecr' commands = [ # Avoid inadvertently using a pre-existing and potentially conflicting Docker config. 
- expansions_update(updates=[KeyValueParam(key="DOCKER_CONFIG", value="${workdir}/.docker")]), - ec2_assume_role(role_arn="arn:aws:iam::901841024863:role/ecr-role-evergreen-ro"), + expansions_update(updates=[KeyValueParam(key='DOCKER_CONFIG', value='${workdir}/.docker')]), + ec2_assume_role(role_arn='arn:aws:iam::901841024863:role/ecr-role-evergreen-ro'), subprocess_exec( - binary="bash", + binary='bash', command_type=EvgCommandType.SETUP, include_expansions_in_env=[ - "AWS_ACCESS_KEY_ID", - "AWS_SECRET_ACCESS_KEY", - "AWS_SESSION_TOKEN", - "DOCKER_CONFIG", + 'AWS_ACCESS_KEY_ID', + 'AWS_SECRET_ACCESS_KEY', + 'AWS_SESSION_TOKEN', + 'DOCKER_CONFIG', ], args=[ - "-c", - "aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin 901841024863.dkr.ecr.us-east-1.amazonaws.com", + '-c', + 'aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin 901841024863.dkr.ecr.us-east-1.amazonaws.com', ], ), ] @@ -195,7 +195,7 @@ def variants_for(config: Configuration) -> Iterable[EarthlyVariant]: def earthly_exec( *, - kind: Literal["test", "setup", "system"], + kind: Literal['test', 'setup', 'system'], target: str, secrets: Mapping[str, str] | None = None, args: Mapping[str, str] | None = None, @@ -203,20 +203,20 @@ def earthly_exec( """Create a subprocess_exec command that runs Earthly with the given arguments""" env: dict[str, str] = {k: v for k, v in (secrets or {}).items()} return subprocess_exec( - "./tools/earthly.sh", + './tools/earthly.sh', args=[ # Use Amazon ECR as pull-through cache for DockerHub to avoid rate limits. 
- "--buildkit-image=901841024863.dkr.ecr.us-east-1.amazonaws.com/dockerhub/earthly/buildkitd:v0.8.3", - *(f"--secret={k}" for k in (secrets or ())), - f"+{target}", + '--buildkit-image=901841024863.dkr.ecr.us-east-1.amazonaws.com/dockerhub/earthly/buildkitd:v0.8.3', + *(f'--secret={k}' for k in (secrets or ())), + f'+{target}', # Use Amazon ECR as pull-through cache for DockerHub to avoid rate limits. - "--default_search_registry=901841024863.dkr.ecr.us-east-1.amazonaws.com/dockerhub", - *(f"--{arg}={val}" for arg, val in (args or {}).items()), + '--default_search_registry=901841024863.dkr.ecr.us-east-1.amazonaws.com/dockerhub', + *(f'--{arg}={val}' for arg, val in (args or {}).items()), ], command_type=EvgCommandType(kind), - include_expansions_in_env=["DOCKER_CONFIG"], + include_expansions_in_env=['DOCKER_CONFIG'], env=env if env else None, - working_dir="mongoc", + working_dir='mongoc', ) @@ -243,8 +243,8 @@ def earthly_task( earthly_args = config._asdict() earthly_args |= { # Add arguments that come from parameter expansions defined in the build variant - "env": f"${{{_ENV_PARAM_NAME}}}", - "c_compiler": f"${{{_CC_PARAM_NAME}}}", + 'env': f'${{{_ENV_PARAM_NAME}}}', + 'c_compiler': f'${{{_CC_PARAM_NAME}}}', } return EvgTask( name=name, @@ -255,29 +255,29 @@ def earthly_task( # for timing and logging purposes. The subequent build step will cache-hit the # warmed-up build environments. 
earthly_exec( - kind="setup", - target="env-warmup", + kind='setup', + target='env-warmup', args=earthly_args, ), # Now execute the main tasks: earthly_exec( - kind="test", - target="run", + kind='test', + target='run', # The "targets" arg is for +run to specify which targets to run - args={"targets": " ".join(targets)} | earthly_args, + args={'targets': ' '.join(targets)} | earthly_args, ), ], # type: ignore (The type annots on `commands` is wrong) - tags=["earthly", "pr-merge-gate", *env_tags], + tags=['earthly', 'pr-merge-gate', *env_tags], run_on=CONTAINER_RUN_DISTROS, ) CONTAINER_RUN_DISTROS = [ - "amazon2", - "debian11-large", - "debian12-large", - "ubuntu2204-large", - "ubuntu2404-large", + 'amazon2', + 'debian11-large', + 'debian12-large', + 'ubuntu2204-large', + 'ubuntu2404-large', ] @@ -288,14 +288,14 @@ def functions(): def tasks() -> Iterable[EvgTask]: for conf in all_possible(Configuration): # test-example is a target in all configurations - targets = ["test-example"] + targets = ['test-example'] # test-cxx-driver is only a target in configurations with specified mongocxx versions - if conf.test_mongocxx_ref != "none": - targets.append("test-cxx-driver") + if conf.test_mongocxx_ref != 'none': + targets.append('test-cxx-driver') task = earthly_task( - name=f"check:{conf.suffix}", + name=f'check:{conf.suffix}', targets=targets, config=conf, ) @@ -303,12 +303,12 @@ def tasks() -> Iterable[EvgTask]: yield task yield EvgTask( - name="verify-headers", + name='verify-headers', commands=[ DockerLoginAmazonECR.call(), - earthly_exec(kind="test", target="verify-headers"), + earthly_exec(kind='test', target='verify-headers'), ], - tags=["pr-merge-gate"], + tags=['pr-merge-gate'], run_on=CONTAINER_RUN_DISTROS, ) diff --git a/.evergreen/config_generator/components/funcs/bootstrap_mongo_orchestration.py b/.evergreen/config_generator/components/funcs/bootstrap_mongo_orchestration.py index d371571dd10..ab0bc59e8ab 100644 --- 
a/.evergreen/config_generator/components/funcs/bootstrap_mongo_orchestration.py +++ b/.evergreen/config_generator/components/funcs/bootstrap_mongo_orchestration.py @@ -1,5 +1,4 @@ -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import expansions_update +from shrub.v3.evg_command import EvgCommandType, expansions_update from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec @@ -15,10 +14,7 @@ class BootstrapMongoOrchestration(Function): script='.evergreen/scripts/integration-tests.sh', add_expansions_to_env=True, ), - expansions_update( - command_type=command_type, - file='mongoc/mo-expansion.yml' - ), + expansions_update(command_type=command_type, file='mongoc/mo-expansion.yml'), ] diff --git a/.evergreen/config_generator/components/funcs/fetch_build.py b/.evergreen/config_generator/components/funcs/fetch_build.py index fcd29e3495e..9e9fad98e1e 100644 --- a/.evergreen/config_generator/components/funcs/fetch_build.py +++ b/.evergreen/config_generator/components/funcs/fetch_build.py @@ -1,6 +1,4 @@ -from shrub.v3.evg_command import archive_targz_extract -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import s3_get +from shrub.v3.evg_command import EvgCommandType, archive_targz_extract, s3_get from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec @@ -24,11 +22,11 @@ class FetchBuild(Function): bash_exec( command_type=command_type, working_dir='mongoc', - script='''\ + script="""\ for file in $(find .evergreen/scripts -type f); do chmod +rx "$file" || exit done - ''' + """, ), ] diff --git a/.evergreen/config_generator/components/funcs/fetch_det.py b/.evergreen/config_generator/components/funcs/fetch_det.py index 0c45752b889..71db4d30fea 100644 --- a/.evergreen/config_generator/components/funcs/fetch_det.py +++ b/.evergreen/config_generator/components/funcs/fetch_det.py @@ -1,5 +1,4 @@ from shrub.v3.evg_command import 
EvgCommandType -from shrub.v3.evg_command import expansions_update from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec @@ -10,17 +9,16 @@ class FetchDET(Function): commands = [ bash_exec( command_type=EvgCommandType.SETUP, - script='''\ + script="""\ if [[ ! -d drivers-evergreen-tools ]]; then git clone --depth=1 https://github.com/mongodb-labs/drivers-evergreen-tools.git fi - ''', + """, ), - # Make shell scripts executable. bash_exec( command_type=EvgCommandType.SETUP, - working_dir="drivers-evergreen-tools", + working_dir='drivers-evergreen-tools', script='find .evergreen -type f -name "*.sh" -exec chmod +rx "{}" \;', ), ] diff --git a/.evergreen/config_generator/components/funcs/fetch_source.py b/.evergreen/config_generator/components/funcs/fetch_source.py index 81ff94983df..f8dd519803e 100644 --- a/.evergreen/config_generator/components/funcs/fetch_source.py +++ b/.evergreen/config_generator/components/funcs/fetch_source.py @@ -1,6 +1,4 @@ -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import expansions_update -from shrub.v3.evg_command import git_get_project +from shrub.v3.evg_command import EvgCommandType, expansions_update, git_get_project from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec @@ -14,7 +12,7 @@ class FetchSource(Function): bash_exec( command_type=command_type, working_dir='mongoc', - script='''\ + script="""\ set -o errexit set -o pipefail if [ -n "${github_pr_number}" -o "${is_patch}" = "true" ]; then @@ -23,19 +21,18 @@ class FetchSource(Function): VERSION=latest fi echo "CURRENT_VERSION: $VERSION" > expansion.yml - ''' + """, ), - expansions_update(command_type=command_type, - file='mongoc/expansion.yml'), + expansions_update(command_type=command_type, file='mongoc/expansion.yml'), # Scripts may not be executable on Windows. 
bash_exec( command_type=EvgCommandType.SETUP, working_dir='mongoc', - script='''\ + script="""\ for file in $(find .evergreen/scripts -type f); do chmod +rx "$file" || exit done - ''' + """, ), ] diff --git a/.evergreen/config_generator/components/funcs/install_uv.py b/.evergreen/config_generator/components/funcs/install_uv.py index decf0a66252..b167beb1902 100644 --- a/.evergreen/config_generator/components/funcs/install_uv.py +++ b/.evergreen/config_generator/components/funcs/install_uv.py @@ -1,17 +1,16 @@ -from config_generator.components.funcs.set_cache_dir import SetCacheDir +from shrub.v3.evg_command import EvgCommandType, expansions_update +from config_generator.components.funcs.set_cache_dir import SetCacheDir from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec -from shrub.v3.evg_command import EvgCommandType, expansions_update - class InstallUV(Function): name = 'install-uv' commands = SetCacheDir.commands + [ bash_exec( command_type=EvgCommandType.SETUP, - script='''\ + script="""\ set -o errexit set -o pipefail @@ -44,7 +43,7 @@ class InstallUV(Function): PATH="$uv_install_dir:$PATH" uv --version printf "UV_INSTALL_DIR: %s\\n" "$uv_install_dir" >|expansions.uv.yml - ''', + """, ), expansions_update( command_type=EvgCommandType.SETUP, diff --git a/.evergreen/config_generator/components/funcs/restore_instance_profile.py b/.evergreen/config_generator/components/funcs/restore_instance_profile.py index 3d79e75c025..2b320513b93 100644 --- a/.evergreen/config_generator/components/funcs/restore_instance_profile.py +++ b/.evergreen/config_generator/components/funcs/restore_instance_profile.py @@ -6,7 +6,7 @@ class RestoreInstanceProfile(Function): name = 'restore-instance-profile' commands = [ bash_exec( - script='''\ + script="""\ # Restore the AWS Instance Profile that may have been removed in AWS tasks. if [[ ! 
-d drivers-evergreen-tools ]]; then @@ -30,7 +30,7 @@ class RestoreInstanceProfile(Function): exit 1 fi echo "restoring instance profile ... succeeded" - ''' + """ ), ] diff --git a/.evergreen/config_generator/components/funcs/run_mock_kms_servers.py b/.evergreen/config_generator/components/funcs/run_mock_kms_servers.py index 72e9d942975..8bed661a523 100644 --- a/.evergreen/config_generator/components/funcs/run_mock_kms_servers.py +++ b/.evergreen/config_generator/components/funcs/run_mock_kms_servers.py @@ -16,7 +16,7 @@ class RunMockKMSServers(Function): bash_exec( command_type=command_type, working_dir='drivers-evergreen-tools/.evergreen/csfle', - script='''\ + script="""\ set -o errexit echo "Preparing KMS TLS venv environment..." # TODO: remove this function along with the "run kms servers" function. @@ -33,13 +33,13 @@ class RunMockKMSServers(Function): deactivate fi echo "Preparing KMS TLS venv environment... done." - ''', + """, ), bash_exec( command_type=command_type, background=True, working_dir='drivers-evergreen-tools/.evergreen/csfle', - script='''\ + script="""\ set -o errexit echo "Starting mock KMS TLS servers..." . ./activate-kmstlsvenv.sh @@ -51,7 +51,7 @@ class RunMockKMSServers(Function): python -u kms_kmip_server.py & deactivate echo "Starting mock KMS TLS servers... done." - ''', + """, ), ] diff --git a/.evergreen/config_generator/components/funcs/run_simple_http_server.py b/.evergreen/config_generator/components/funcs/run_simple_http_server.py index 4ef341efaee..614c717add6 100644 --- a/.evergreen/config_generator/components/funcs/run_simple_http_server.py +++ b/.evergreen/config_generator/components/funcs/run_simple_http_server.py @@ -13,12 +13,12 @@ class RunSimpleHTTPServer(Function): background=True, working_dir='mongoc', include_expansions_in_env=['UV_INSTALL_DIR'], - script='''\ + script="""\ set -o errexit echo "Starting simple HTTP server..." 
PATH="${UV_INSTALL_DIR}:$PATH" uvx python .evergreen/scripts/simple_http_server.py echo "Starting simple HTTP server... done." - ''', + """, ), ] diff --git a/.evergreen/config_generator/components/funcs/set_cache_dir.py b/.evergreen/config_generator/components/funcs/set_cache_dir.py index cd8d114c05d..e0584d3786e 100644 --- a/.evergreen/config_generator/components/funcs/set_cache_dir.py +++ b/.evergreen/config_generator/components/funcs/set_cache_dir.py @@ -1,15 +1,15 @@ +from shrub.v3.evg_command import EvgCommandType, expansions_update + from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec -from shrub.v3.evg_command import EvgCommandType, expansions_update - class SetCacheDir(Function): name = 'set-cache-dir' commands = [ bash_exec( command_type=EvgCommandType.SETUP, - script='''\ + script="""\ if [[ -n "$XDG_CACHE_DIR" ]]; then cache_dir="$XDG_CACHE_DIR" # XDG Base Directory specification. elif [[ -n "$LOCALAPPDATA" ]]; then @@ -30,12 +30,9 @@ class SetCacheDir(Function): cache_dir="$(cd "$cache_dir/mongo-c-driver" && pwd)" || exit printf "MONGO_C_DRIVER_CACHE_DIR: %s\\n" "$cache_dir" >|expansions.set-cache-dir.yml - ''', - ), - expansions_update( - command_type=EvgCommandType.SETUP, - file='expansions.set-cache-dir.yml' + """, ), + expansions_update(command_type=EvgCommandType.SETUP, file='expansions.set-cache-dir.yml'), ] diff --git a/.evergreen/config_generator/components/funcs/stop_load_balancer.py b/.evergreen/config_generator/components/funcs/stop_load_balancer.py index 5e867948ffe..a1d0f5030c4 100644 --- a/.evergreen/config_generator/components/funcs/stop_load_balancer.py +++ b/.evergreen/config_generator/components/funcs/stop_load_balancer.py @@ -6,7 +6,7 @@ class StopLoadBalancer(Function): name = 'stop-load-balancer' commands = [ bash_exec( - script='''\ + script="""\ # Only run if a load balancer was started. 
if [[ -z "${SINGLE_MONGOS_LB_URI}" ]]; then echo "OK - no load balancer running" @@ -15,7 +15,7 @@ class StopLoadBalancer(Function): if [[ -d drivers-evergreen-tools ]]; then cd drivers-evergreen-tools && .evergreen/run-load-balancer.sh stop fi - ''' + """ ), ] diff --git a/.evergreen/config_generator/components/funcs/stop_mongo_orchestration.py b/.evergreen/config_generator/components/funcs/stop_mongo_orchestration.py index 0e549e2a7cc..fb3126695f8 100644 --- a/.evergreen/config_generator/components/funcs/stop_mongo_orchestration.py +++ b/.evergreen/config_generator/components/funcs/stop_mongo_orchestration.py @@ -6,11 +6,11 @@ class StopMongoOrchestration(Function): name = 'stop-mongo-orchestration' commands = [ bash_exec( - script='''\ + script="""\ if [[ -d MO ]]; then cd MO && mongo-orchestration stop fi - ''' + """ ), ] diff --git a/.evergreen/config_generator/components/funcs/upload_build.py b/.evergreen/config_generator/components/funcs/upload_build.py index c0c3f58e277..9c852354d2a 100644 --- a/.evergreen/config_generator/components/funcs/upload_build.py +++ b/.evergreen/config_generator/components/funcs/upload_build.py @@ -1,6 +1,6 @@ +from shrub.v3.evg_command import archive_targz_pack, s3_put + from config_generator.etc.function import Function -from shrub.v3.evg_command import archive_targz_pack -from shrub.v3.evg_command import s3_put class UploadBuild(Function): diff --git a/.evergreen/config_generator/components/funcs/upload_mo_artifacts.py b/.evergreen/config_generator/components/funcs/upload_mo_artifacts.py index e9a4def310e..06bf736221c 100644 --- a/.evergreen/config_generator/components/funcs/upload_mo_artifacts.py +++ b/.evergreen/config_generator/components/funcs/upload_mo_artifacts.py @@ -1,5 +1,4 @@ -from shrub.v3.evg_command import archive_targz_pack -from shrub.v3.evg_command import s3_put +from shrub.v3.evg_command import archive_targz_pack, s3_put from config_generator.etc.function import Function from config_generator.etc.utils import 
bash_exec @@ -10,7 +9,7 @@ class UploadMOArtifacts(Function): commands = [ bash_exec( working_dir='mongoc', - script='''\ + script="""\ set -o errexit declare dir="MO" if [[ -d "/cygdrive/c/data/mo" ]]; then @@ -19,7 +18,7 @@ class UploadMOArtifacts(Function): if [[ -d "$dir" ]]; then find "$dir" -name \\*.log | xargs tar czf mongodb-logs.tar.gz fi - ''' + """, ), s3_put( aws_key='${aws_key}', @@ -45,7 +44,7 @@ class UploadMOArtifacts(Function): ), bash_exec( working_dir='mongoc', - script='''\ + script="""\ set -o errexit # Find all core files from mongodb in orchestration and move to mongoc declare dir="MO" @@ -64,7 +63,7 @@ class UploadMOArtifacts(Function): mv "$core_file" . fi done - ''' + """, ), archive_targz_pack( target='mongo-coredumps.tgz', diff --git a/.evergreen/config_generator/components/funcs/upload_test_results.py b/.evergreen/config_generator/components/funcs/upload_test_results.py index dc756a2d92f..1084df056e9 100644 --- a/.evergreen/config_generator/components/funcs/upload_test_results.py +++ b/.evergreen/config_generator/components/funcs/upload_test_results.py @@ -9,14 +9,12 @@ class UploadTestResults(Function): commands = [ # Ensure attach_results does not fail even if no tests results exist. 
bash_exec( - script='''\ + script="""\ mkdir -p mongoc touch mongoc/test-results.json - ''' - ), - attach_results( - file_location='mongoc/test-results.json' + """ ), + attach_results(file_location='mongoc/test-results.json'), ] diff --git a/.evergreen/config_generator/components/kms_divergence_check.py b/.evergreen/config_generator/components/kms_divergence_check.py index 6cc7736d5d4..c927bf80c24 100644 --- a/.evergreen/config_generator/components/kms_divergence_check.py +++ b/.evergreen/config_generator/components/kms_divergence_check.py @@ -1,5 +1,4 @@ from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import s3_put from shrub.v3.evg_task import EvgTask from config_generator.etc.function import Function @@ -7,12 +6,12 @@ class KmsDivergenceCheck(Function): - name = "kms-divergence-check" + name = 'kms-divergence-check' commands = [ bash_exec( command_type=EvgCommandType.TEST, - working_dir="mongoc", - script=".evergreen/scripts/kms-divergence-check.sh" + working_dir='mongoc', + script='.evergreen/scripts/kms-divergence-check.sh', ), ] diff --git a/.evergreen/config_generator/components/loadbalanced.py b/.evergreen/config_generator/components/loadbalanced.py index 111e6734c5a..e64c30c67d3 100644 --- a/.evergreen/config_generator/components/loadbalanced.py +++ b/.evergreen/config_generator/components/loadbalanced.py @@ -1,6 +1,6 @@ from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_command import EvgCommandType, FunctionCall, expansions_update -from shrub.v3.evg_task import EvgTask, EvgTaskRef, EvgTaskDependency +from shrub.v3.evg_task import EvgTask, EvgTaskDependency, EvgTaskRef from config_generator.components.funcs.bootstrap_mongo_orchestration import BootstrapMongoOrchestration from config_generator.components.funcs.fetch_build import FetchBuild @@ -9,12 +9,12 @@ from config_generator.components.funcs.run_simple_http_server import RunSimpleHTTPServer from config_generator.components.funcs.run_tests import RunTests 
from config_generator.components.funcs.upload_build import UploadBuild -from config_generator.etc.distros import make_distro_str, find_small_distro, find_large_distro +from config_generator.etc.distros import find_large_distro, make_distro_str from config_generator.etc.utils import bash_exec # Use `rhel8-latest` distro. `rhel8-latest` distro includes necessary dependency: `haproxy`. -_DISTRO_NAME = "rhel8-latest" -_COMPILER = "gcc" +_DISTRO_NAME = 'rhel8-latest' +_COMPILER = 'gcc' def functions(): @@ -22,51 +22,52 @@ def functions(): 'start-load-balancer': [ bash_exec( command_type=EvgCommandType.SETUP, - script='''\ + script="""\ export DRIVERS_TOOLS=./drivers-evergreen-tools export MONGODB_URI="${MONGODB_URI}" $DRIVERS_TOOLS/.evergreen/run-load-balancer.sh start - ''', + """, ), expansions_update( command_type=EvgCommandType.SETUP, file='lb-expansion.yml', - ) + ), ] } def make_test_task(auth: bool, ssl: bool, server_version: str): - auth_str = "auth" if auth else "noauth" - ssl_str = "openssl" if ssl else "nossl" + auth_str = 'auth' if auth else 'noauth' + ssl_str = 'openssl' if ssl else 'nossl' distro_str = make_distro_str(_DISTRO_NAME, _COMPILER, None) return EvgTask( - name=f"loadbalanced-{distro_str}-test-{server_version}-{auth_str}-{ssl_str}", - depends_on=[EvgTaskDependency( - name=f"loadbalanced-{distro_str}-compile")], - run_on=find_large_distro(_DISTRO_NAME).name, # DEVPROD-18763 + name=f'loadbalanced-{distro_str}-test-{server_version}-{auth_str}-{ssl_str}', + depends_on=[EvgTaskDependency(name=f'loadbalanced-{distro_str}-compile')], + run_on=find_large_distro(_DISTRO_NAME).name, # DEVPROD-18763 tags=['loadbalanced', _DISTRO_NAME, _COMPILER, auth_str, ssl_str], commands=[ - FetchBuild.call(build_name=f"loadbalanced-{distro_str}-compile"), + FetchBuild.call(build_name=f'loadbalanced-{distro_str}-compile'), FetchDET.call(), - BootstrapMongoOrchestration().call(vars={ - 'AUTH': auth_str, - 'SSL': ssl_str, - 'MONGODB_VERSION': server_version, - 'TOPOLOGY': 
'sharded_cluster', - 'LOAD_BALANCER': 'on', - }), + BootstrapMongoOrchestration().call( + vars={ + 'AUTH': auth_str, + 'SSL': ssl_str, + 'MONGODB_VERSION': server_version, + 'TOPOLOGY': 'sharded_cluster', + 'LOAD_BALANCER': 'on', + } + ), InstallUV.call(), RunSimpleHTTPServer.call(), - FunctionCall(func='start-load-balancer', vars={ - 'MONGODB_URI': 'mongodb://localhost:27017,localhost:27018' - }), - RunTests().call(vars={ - 'AUTH': auth_str, - 'SSL': ssl_str, - 'LOADBALANCED': 'loadbalanced', - 'CC': _COMPILER, - }) + FunctionCall(func='start-load-balancer', vars={'MONGODB_URI': 'mongodb://localhost:27017,localhost:27018'}), + RunTests().call( + vars={ + 'AUTH': auth_str, + 'SSL': ssl_str, + 'LOADBALANCED': 'loadbalanced', + 'CC': _COMPILER, + } + ), ], ) @@ -74,23 +75,19 @@ def make_test_task(auth: bool, ssl: bool, server_version: str): def tasks(): distro_str = make_distro_str(_DISTRO_NAME, _COMPILER, None) yield EvgTask( - name=f"loadbalanced-{distro_str}-compile", + name=f'loadbalanced-{distro_str}-compile', run_on=find_large_distro(_DISTRO_NAME).name, tags=['loadbalanced', _DISTRO_NAME, _COMPILER], commands=[ InstallUV.call(), bash_exec( command_type=EvgCommandType.TEST, - env={ - 'CC': _COMPILER, - 'CFLAGS': '-fno-omit-frame-pointer', - 'SSL': 'OPENSSL' - }, + env={'CC': _COMPILER, 'CFLAGS': '-fno-omit-frame-pointer', 'SSL': 'OPENSSL'}, include_expansions_in_env=['distro_id', 'UV_INSTALL_DIR'], working_dir='mongoc', script='.evergreen/scripts/compile.sh', ), - UploadBuild.call() + UploadBuild.call(), ], ) @@ -109,9 +106,5 @@ def tasks(): def variants(): return [ - BuildVariant( - name="loadbalanced", - display_name="loadbalanced", - tasks=[EvgTaskRef(name='.loadbalanced')] - ), + BuildVariant(name='loadbalanced', display_name='loadbalanced', tasks=[EvgTaskRef(name='.loadbalanced')]), ] diff --git a/.evergreen/config_generator/components/make_docs.py b/.evergreen/config_generator/components/make_docs.py index 05cddc91625..155507cd010 100644 --- 
a/.evergreen/config_generator/components/make_docs.py +++ b/.evergreen/config_generator/components/make_docs.py @@ -1,21 +1,18 @@ -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import s3_put +from shrub.v3.evg_command import EvgCommandType, s3_put from shrub.v3.evg_task import EvgTask from config_generator.components.funcs.install_uv import InstallUV - -from config_generator.etc.function import Function -from config_generator.etc.function import merge_defns +from config_generator.etc.function import Function, merge_defns from config_generator.etc.utils import bash_exec class MakeDocs(Function): - name = "make-docs" + name = 'make-docs' commands = [ bash_exec( command_type=EvgCommandType.TEST, - working_dir="mongoc", - include_expansions_in_env=["distro_id"], + working_dir='mongoc', + include_expansions_in_env=['distro_id'], script="""\ # See SphinxBuild.cmake for EVG_DOCS_BUILD reasoning PATH="${UV_INSTALL_DIR}:$PATH" uv run --frozen --only-group docs env EVG_DOCS_BUILD=1 .evergreen/scripts/build-docs.sh @@ -25,53 +22,52 @@ class MakeDocs(Function): class UploadDocs(Function): - name = "upload-docs" + name = 'upload-docs' commands = [ bash_exec( - working_dir="mongoc/_build/for-docs/src/libbson", + working_dir='mongoc/_build/for-docs/src/libbson', env={ - "AWS_ACCESS_KEY_ID": "${aws_key}", - "AWS_SECRET_ACCESS_KEY": "${aws_secret}", + 'AWS_ACCESS_KEY_ID': '${aws_key}', + 'AWS_SECRET_ACCESS_KEY': '${aws_secret}', }, - script="aws s3 cp doc/html s3://mciuploads/${project}/docs/libbson/${CURRENT_VERSION} --quiet --recursive --acl public-read --region us-east-1", + script='aws s3 cp doc/html s3://mciuploads/${project}/docs/libbson/${CURRENT_VERSION} --quiet --recursive --acl public-read --region us-east-1', ), s3_put( - aws_key="${aws_key}", - aws_secret="${aws_secret}", - bucket="mciuploads", - content_type="text/html", - display_name="libbson docs", - local_file="mongoc/_build/for-docs/src/libbson/doc/html/index.html", - 
permissions="public-read", - remote_file="${project}/docs/libbson/${CURRENT_VERSION}/index.html", + aws_key='${aws_key}', + aws_secret='${aws_secret}', + bucket='mciuploads', + content_type='text/html', + display_name='libbson docs', + local_file='mongoc/_build/for-docs/src/libbson/doc/html/index.html', + permissions='public-read', + remote_file='${project}/docs/libbson/${CURRENT_VERSION}/index.html', ), bash_exec( - working_dir="mongoc/_build/for-docs/src/libmongoc", + working_dir='mongoc/_build/for-docs/src/libmongoc', env={ - "AWS_ACCESS_KEY_ID": "${aws_key}", - "AWS_SECRET_ACCESS_KEY": "${aws_secret}", + 'AWS_ACCESS_KEY_ID': '${aws_key}', + 'AWS_SECRET_ACCESS_KEY': '${aws_secret}', }, - script="aws s3 cp doc/html s3://mciuploads/${project}/docs/libmongoc/${CURRENT_VERSION} --quiet --recursive --acl public-read --region us-east-1", + script='aws s3 cp doc/html s3://mciuploads/${project}/docs/libmongoc/${CURRENT_VERSION} --quiet --recursive --acl public-read --region us-east-1', ), s3_put( - aws_key="${aws_key}", - aws_secret="${aws_secret}", - bucket="mciuploads", - content_type="text/html", - display_name="libmongoc docs", - local_file="mongoc/_build/for-docs/src/libmongoc/doc/html/index.html", - permissions="public-read", - remote_file="${project}/docs/libmongoc/${CURRENT_VERSION}/index.html", + aws_key='${aws_key}', + aws_secret='${aws_secret}', + bucket='mciuploads', + content_type='text/html', + display_name='libmongoc docs', + local_file='mongoc/_build/for-docs/src/libmongoc/doc/html/index.html', + permissions='public-read', + remote_file='${project}/docs/libmongoc/${CURRENT_VERSION}/index.html', ), ] class UploadManPages(Function): - name = "upload-man-pages" + name = 'upload-man-pages' commands = [ bash_exec( - working_dir="mongoc", - silent=True, + working_dir='mongoc', script="""\ set -o errexit # Get "aha", the ANSI HTML Adapter. 
@@ -85,24 +81,24 @@ class UploadManPages(Function): """, ), s3_put( - aws_key="${aws_key}", - aws_secret="${aws_secret}", - bucket="mciuploads", - content_type="text/html", - display_name="libbson man pages", - local_file="mongoc/bson-man-pages.html", - permissions="public-read", - remote_file="${project}/man-pages/libbson/${CURRENT_VERSION}/index.html", + aws_key='${aws_key}', + aws_secret='${aws_secret}', + bucket='mciuploads', + content_type='text/html', + display_name='libbson man pages', + local_file='mongoc/bson-man-pages.html', + permissions='public-read', + remote_file='${project}/man-pages/libbson/${CURRENT_VERSION}/index.html', ), s3_put( - aws_key="${aws_key}", - aws_secret="${aws_secret}", - bucket="mciuploads", - content_type="text/html", - display_name="libmongoc man pages", - local_file="mongoc/mongoc-man-pages.html", - permissions="public-read", - remote_file="${project}/man-pages/libmongoc/${CURRENT_VERSION}/index.html", + aws_key='${aws_key}', + aws_secret='${aws_secret}', + bucket='mciuploads', + content_type='text/html', + display_name='libmongoc man pages', + local_file='mongoc/mongoc-man-pages.html', + permissions='public-read', + remote_file='${project}/man-pages/libmongoc/${CURRENT_VERSION}/index.html', ), ] @@ -118,7 +114,7 @@ def functions(): def tasks(): return [ EvgTask( - name="make-docs", + name='make-docs', commands=[ InstallUV.call(), MakeDocs.call(), diff --git a/.evergreen/config_generator/components/mock_server.py b/.evergreen/config_generator/components/mock_server.py index 0c5dc9e16a6..d1fe4d3b04d 100644 --- a/.evergreen/config_generator/components/mock_server.py +++ b/.evergreen/config_generator/components/mock_server.py @@ -6,11 +6,12 @@ from config_generator.components.funcs.run_simple_http_server import RunSimpleHTTPServer from config_generator.etc.utils import bash_exec + def tasks(): return [ EvgTask( - name="mock-server-test", - run_on="ubuntu2204-small", + name='mock-server-test', + run_on='ubuntu2204-small', commands=[ 
InstallUV.call(), RunSimpleHTTPServer.call(), @@ -24,22 +25,23 @@ def tasks(): command_type=EvgCommandType.TEST, working_dir='mongoc', script='.evergreen/scripts/run-mock-server-tests.sh', - ) + ), ], ) ] + def variants(): return [ BuildVariant( - name="mock-server-test", - display_name="Mock Server Test", + name='mock-server-test', + display_name='Mock Server Test', tasks=[EvgTaskRef(name='mock-server-test')], expansions={ 'CC': 'gcc', 'ASAN': 'on', 'CFLAGS': '-fno-omit-frame-pointer', 'SANITIZE': 'address,undefined', - } + }, ), ] diff --git a/.evergreen/config_generator/components/openssl_compat.py b/.evergreen/config_generator/components/openssl_compat.py index 80c2da5325e..68043171cfe 100644 --- a/.evergreen/config_generator/components/openssl_compat.py +++ b/.evergreen/config_generator/components/openssl_compat.py @@ -1,15 +1,14 @@ -from config_generator.etc.distros import find_large_distro, make_distro_str -from config_generator.etc.function import Function -from config_generator.etc.utils import bash_exec - -from config_generator.components.funcs.fetch_source import FetchSource -from config_generator.components.funcs.install_uv import InstallUV +from itertools import product from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_command import EvgCommandType, FunctionCall from shrub.v3.evg_task import EvgTask, EvgTaskRef -from itertools import product +from config_generator.components.funcs.fetch_source import FetchSource +from config_generator.components.funcs.install_uv import InstallUV +from config_generator.etc.distros import find_large_distro, make_distro_str +from config_generator.etc.function import Function +from config_generator.etc.utils import bash_exec TAG = 'openssl-compat' @@ -81,7 +80,7 @@ def tasks(): FetchSource.call(), InstallUV.call(), OpenSSLSetup.call(vars=vars), - FunctionCall(func="run auth tests"), + FunctionCall(func='run auth tests'), ], ) @@ -102,7 +101,7 @@ def tasks(): FetchSource.call(), InstallUV.call(), 
OpenSSLSetup.call(vars=vars), - FunctionCall(func="run auth tests"), + FunctionCall(func='run auth tests'), ], ) diff --git a/.evergreen/config_generator/components/sanitizers/asan.py b/.evergreen/config_generator/components/sanitizers/asan.py index b55d161239f..db40ffd0367 100644 --- a/.evergreen/config_generator/components/sanitizers/asan.py +++ b/.evergreen/config_generator/components/sanitizers/asan.py @@ -1,7 +1,6 @@ from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_task import EvgTaskRef - TAG = 'sanitizers-matrix-asan' diff --git a/.evergreen/config_generator/components/sanitizers/asan_cse.py b/.evergreen/config_generator/components/sanitizers/asan_cse.py index c597c48ad37..82a55c61320 100644 --- a/.evergreen/config_generator/components/sanitizers/asan_cse.py +++ b/.evergreen/config_generator/components/sanitizers/asan_cse.py @@ -1,11 +1,7 @@ -from config_generator.etc.compile import generate_compile_tasks - -from config_generator.etc.sanitizers.test import generate_test_tasks - from config_generator.components.cse.openssl import SaslCyrusOpenSSLCompile - from config_generator.components.sanitizers.asan import TAG - +from config_generator.etc.compile import generate_compile_tasks +from config_generator.etc.sanitizers.test import generate_test_tasks # pylint: disable=line-too-long # fmt: off @@ -33,16 +29,12 @@ def tasks(): 'cyrus': SaslCyrusOpenSSLCompile, } - res += generate_compile_tasks( - SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS - ) + res += generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS) res += generate_test_tasks(SSL, TAG, TEST_MATRIX, MORE_TAGS) res += generate_test_tasks( - SSL, TAG, TEST_MATRIX, MORE_TAGS, - MORE_TEST_TAGS=['with-mongocrypt'], - MORE_VARS={'SKIP_CRYPT_SHARED_LIB': 'on'} + SSL, TAG, TEST_MATRIX, MORE_TAGS, MORE_TEST_TAGS=['with-mongocrypt'], MORE_VARS={'SKIP_CRYPT_SHARED_LIB': 'on'} ) return res diff --git a/.evergreen/config_generator/components/sanitizers/asan_sasl.py 
b/.evergreen/config_generator/components/sanitizers/asan_sasl.py index 0275869de9b..1e05aabeb7e 100644 --- a/.evergreen/config_generator/components/sanitizers/asan_sasl.py +++ b/.evergreen/config_generator/components/sanitizers/asan_sasl.py @@ -1,12 +1,8 @@ +from config_generator.components.sanitizers.asan import TAG +from config_generator.components.sasl.openssl import SaslCyrusOpenSSLCompile from config_generator.etc.compile import generate_compile_tasks - from config_generator.etc.sanitizers.test import generate_test_tasks -from config_generator.components.sasl.openssl import SaslCyrusOpenSSLCompile - -from config_generator.components.sanitizers.asan import TAG - - # pylint: disable=line-too-long # fmt: off COMPILE_MATRIX = [ @@ -30,9 +26,7 @@ def tasks(): 'cyrus': SaslCyrusOpenSSLCompile, } - res += generate_compile_tasks( - SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, ['asan'] - ) + res += generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, ['asan']) res += generate_test_tasks(SSL, TAG, TEST_MATRIX, ['asan']) diff --git a/.evergreen/config_generator/components/sanitizers/tsan.py b/.evergreen/config_generator/components/sanitizers/tsan.py index d506f43b887..0c54fa8f64a 100644 --- a/.evergreen/config_generator/components/sanitizers/tsan.py +++ b/.evergreen/config_generator/components/sanitizers/tsan.py @@ -1,7 +1,6 @@ from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_task import EvgTaskRef - TAG = 'sanitizers-matrix-tsan' diff --git a/.evergreen/config_generator/components/sanitizers/tsan_sasl.py b/.evergreen/config_generator/components/sanitizers/tsan_sasl.py index 305326d10b9..7364c4d9255 100644 --- a/.evergreen/config_generator/components/sanitizers/tsan_sasl.py +++ b/.evergreen/config_generator/components/sanitizers/tsan_sasl.py @@ -1,11 +1,7 @@ -from config_generator.etc.compile import generate_compile_tasks - -from config_generator.etc.sanitizers.test import generate_test_tasks - -from config_generator.components.sasl.openssl 
import SaslCyrusOpenSSLCompile - from config_generator.components.sanitizers.tsan import TAG - +from config_generator.components.sasl.openssl import SaslCyrusOpenSSLCompile +from config_generator.etc.compile import generate_compile_tasks +from config_generator.etc.sanitizers.test import generate_test_tasks # pylint: disable=line-too-long # fmt: off @@ -30,9 +26,7 @@ def tasks(): SSL = 'openssl' SASL_TO_FUNC = {'cyrus': SaslCyrusOpenSSLCompile} - res += generate_compile_tasks( - SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS - ) + res += generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, COMPILE_MATRIX, MORE_TAGS) res += generate_test_tasks(SSL, TAG, TEST_OPENSSL_MATRIX, MORE_TAGS) diff --git a/.evergreen/config_generator/components/sasl/darwinssl.py b/.evergreen/config_generator/components/sasl/darwinssl.py index 89d91b6412c..2fe81fd8e43 100644 --- a/.evergreen/config_generator/components/sasl/darwinssl.py +++ b/.evergreen/config_generator/components/sasl/darwinssl.py @@ -1,13 +1,11 @@ from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_task import EvgTaskRef -from config_generator.etc.function import merge_defns from config_generator.etc.compile import generate_compile_tasks - +from config_generator.etc.function import merge_defns from config_generator.etc.sasl.compile import CompileCommon from config_generator.etc.sasl.test import generate_test_tasks - SSL = 'darwinssl' TAG = f'sasl-matrix-{SSL}' diff --git a/.evergreen/config_generator/components/sasl/nossl.py b/.evergreen/config_generator/components/sasl/nossl.py index 2f8b7a98037..2f9410a00a5 100644 --- a/.evergreen/config_generator/components/sasl/nossl.py +++ b/.evergreen/config_generator/components/sasl/nossl.py @@ -1,13 +1,11 @@ from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_task import EvgTaskRef -from config_generator.etc.function import merge_defns from config_generator.etc.compile import generate_compile_tasks - +from config_generator.etc.function import 
merge_defns from config_generator.etc.sasl.compile import CompileCommon from config_generator.etc.sasl.test import generate_test_tasks - SSL = 'nossl' TAG = f'sasl-matrix-{SSL}' diff --git a/.evergreen/config_generator/components/sasl/openssl.py b/.evergreen/config_generator/components/sasl/openssl.py index 40541ddd56f..c880c98a295 100644 --- a/.evergreen/config_generator/components/sasl/openssl.py +++ b/.evergreen/config_generator/components/sasl/openssl.py @@ -1,12 +1,10 @@ from shrub.v3.evg_build_variant import BuildVariant -from config_generator.etc.utils import TaskRef -from config_generator.etc.function import merge_defns from config_generator.etc.compile import generate_compile_tasks - +from config_generator.etc.function import merge_defns from config_generator.etc.sasl.compile import CompileCommon from config_generator.etc.sasl.test import generate_test_tasks - +from config_generator.etc.utils import TaskRef SSL = 'openssl' TAG = f'sasl-matrix-{SSL}' @@ -79,7 +77,7 @@ def tasks(): # PowerPC and zSeries are limited resources. for task in res: - if any(pattern in task.run_on for pattern in ["power", "zseries"]): + if any(pattern in task.run_on for pattern in ['power', 'zseries']): task.patchable = False return res @@ -92,11 +90,11 @@ def variants(): # PowerPC and zSeries are limited resources. 
for task in TASKS: - if any(pattern in task.run_on for pattern in ["power", "zseries"]): + if any(pattern in task.run_on for pattern in ['power', 'zseries']): tasks.append( TaskRef( name=task.name, - batchtime=1440, # 1 day + batchtime=1440, # 1 day ) ) else: diff --git a/.evergreen/config_generator/components/sasl/winssl.py b/.evergreen/config_generator/components/sasl/winssl.py index 3c56cfd036b..fe3655e183d 100644 --- a/.evergreen/config_generator/components/sasl/winssl.py +++ b/.evergreen/config_generator/components/sasl/winssl.py @@ -1,13 +1,11 @@ from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_task import EvgTaskRef -from config_generator.etc.function import merge_defns from config_generator.etc.compile import generate_compile_tasks - +from config_generator.etc.function import merge_defns from config_generator.etc.sasl.compile import CompileCommon from config_generator.etc.sasl.test import generate_test_tasks - SSL = 'winssl' TAG = f'sasl-matrix-{SSL}' diff --git a/.evergreen/config_generator/components/sbom.py b/.evergreen/config_generator/components/sbom.py index dd2af1b6ed9..5a510b8e060 100644 --- a/.evergreen/config_generator/components/sbom.py +++ b/.evergreen/config_generator/components/sbom.py @@ -1,7 +1,4 @@ -from config_generator.etc.distros import find_small_distro -from config_generator.etc.function import Function, merge_defns -from config_generator.etc.utils import bash_exec - +from pydantic import ConfigDict from shrub.v3.evg_build_variant import BuildVariant from shrub.v3.evg_command import ( BuiltInCommand, @@ -13,8 +10,9 @@ ) from shrub.v3.evg_task import EvgTask, EvgTaskRef -from pydantic import ConfigDict - +from config_generator.etc.distros import find_small_distro +from config_generator.etc.function import Function, merge_defns +from config_generator.etc.utils import bash_exec TAG = 'sbom' @@ -40,12 +38,12 @@ class SBOM(Function): 'AWS_SECRET_ACCESS_KEY', 'AWS_SESSION_TOKEN', ], - script='''\ + script="""\ set -o 
errexit set -o pipefail kondukto_token="$(aws secretsmanager get-secret-value --secret-id "kondukto-token" --region "us-east-1" --query 'SecretString' --output text)" printf "KONDUKTO_TOKEN: %s\\n" "$kondukto_token" >|expansions.kondukto.yml - ''', + """, ), expansions_update( command_type=EvgCommandType.SETUP, @@ -57,14 +55,14 @@ class SBOM(Function): # Avoid inadvertently using a pre-existing and potentially conflicting Podman config. # Note: podman understands and uses DOCKER_CONFIG despite the name. expansions_update(updates=[KeyValueParam(key='DOCKER_CONFIG', value='${workdir}/.docker')]), - ec2_assume_role(role_arn="arn:aws:iam::901841024863:role/ecr-role-evergreen-ro"), + ec2_assume_role(role_arn='arn:aws:iam::901841024863:role/ecr-role-evergreen-ro'), bash_exec( command_type=EvgCommandType.SETUP, include_expansions_in_env=[ - "AWS_ACCESS_KEY_ID", - "AWS_SECRET_ACCESS_KEY", - "AWS_SESSION_TOKEN", - "DOCKER_CONFIG", + 'AWS_ACCESS_KEY_ID', + 'AWS_SECRET_ACCESS_KEY', + 'AWS_SESSION_TOKEN', + 'DOCKER_CONFIG', ], script='aws ecr get-login-password --region us-east-1 | podman login --username AWS --password-stdin 901841024863.dkr.ecr.us-east-1.amazonaws.com', ), @@ -75,7 +73,7 @@ class SBOM(Function): include_expansions_in_env=[ 'branch_name', 'DOCKER_CONFIG', - "KONDUKTO_TOKEN", + 'KONDUKTO_TOKEN', ], script='.evergreen/scripts/sbom.sh', ), diff --git a/.evergreen/config_generator/components/scan_build.py b/.evergreen/config_generator/components/scan_build.py index 5738f29e3ca..75791c96c72 100644 --- a/.evergreen/config_generator/components/scan_build.py +++ b/.evergreen/config_generator/components/scan_build.py @@ -1,17 +1,12 @@ from shrub.v3.evg_build_variant import BuildVariant -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command import FunctionCall +from shrub.v3.evg_command import EvgCommandType, FunctionCall from shrub.v3.evg_task import EvgTask, EvgTaskRef from config_generator.components.funcs.install_uv import InstallUV - -from 
config_generator.etc.distros import find_large_distro -from config_generator.etc.distros import make_distro_str -from config_generator.etc.distros import compiler_to_vars +from config_generator.etc.distros import compiler_to_vars, find_large_distro, make_distro_str from config_generator.etc.function import Function from config_generator.etc.utils import bash_exec - TAG = 'scan-build-matrix' diff --git a/.evergreen/config_generator/etc/compile.py b/.evergreen/config_generator/etc/compile.py index 2b63fa6df02..812062d15f1 100644 --- a/.evergreen/config_generator/etc/compile.py +++ b/.evergreen/config_generator/etc/compile.py @@ -1,11 +1,8 @@ from shrub.v3.evg_task import EvgTask -from config_generator.etc.distros import find_large_distro -from config_generator.etc.distros import make_distro_str -from config_generator.etc.distros import compiler_to_vars - from config_generator.components.funcs.install_uv import InstallUV from config_generator.components.funcs.upload_build import UploadBuild +from config_generator.etc.distros import compiler_to_vars, find_large_distro, make_distro_str def generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, MATRIX, MORE_TAGS=None, MORE_VARS=None): @@ -14,7 +11,7 @@ def generate_compile_tasks(SSL, TAG, SASL_TO_FUNC, MATRIX, MORE_TAGS=None, MORE_ MORE_TAGS = MORE_TAGS if MORE_TAGS else [] MORE_VARS = MORE_VARS if MORE_VARS else {} - for distro_name, compiler, arch, sasls, in MATRIX: + for distro_name, compiler, arch, sasls in MATRIX: tags = [TAG, 'compile', distro_name, compiler] + MORE_TAGS distro = find_large_distro(distro_name) diff --git a/.evergreen/config_generator/etc/cse/compile.py b/.evergreen/config_generator/etc/cse/compile.py index 47ced275552..2827c3146fa 100644 --- a/.evergreen/config_generator/etc/cse/compile.py +++ b/.evergreen/config_generator/etc/cse/compile.py @@ -1,13 +1,9 @@ from typing import ClassVar -from shrub.v3.evg_command import EvgCommand -from shrub.v3.evg_command import EvgCommandType -from shrub.v3.evg_command 
import expansions_update -from shrub.v3.evg_command import KeyValueParam - -from config_generator.etc.utils import bash_exec +from shrub.v3.evg_command import EvgCommand, EvgCommandType, KeyValueParam, expansions_update from config_generator.etc.function import Function +from config_generator.etc.utils import bash_exec class CompileCommon(Function): diff --git a/.evergreen/config_generator/etc/cse/test.py b/.evergreen/config_generator/etc/cse/test.py index 3057ac38dc2..46940f8790f 100644 --- a/.evergreen/config_generator/etc/cse/test.py +++ b/.evergreen/config_generator/etc/cse/test.py @@ -1,19 +1,15 @@ from itertools import product -from shrub.v3.evg_command import expansions_update -from shrub.v3.evg_command import KeyValueParam +from shrub.v3.evg_command import KeyValueParam, expansions_update from shrub.v3.evg_task import EvgTask, EvgTaskDependency -from config_generator.etc.distros import find_large_distro, find_small_distro -from config_generator.etc.distros import make_distro_str -from config_generator.etc.distros import compiler_to_vars - from config_generator.components.funcs.bootstrap_mongo_orchestration import BootstrapMongoOrchestration from config_generator.components.funcs.fetch_build import FetchBuild from config_generator.components.funcs.fetch_det import FetchDET from config_generator.components.funcs.install_uv import InstallUV from config_generator.components.funcs.run_mock_kms_servers import RunMockKMSServers from config_generator.components.funcs.run_tests import RunTests +from config_generator.etc.distros import compiler_to_vars, find_large_distro, find_small_distro, make_distro_str def generate_test_tasks(SSL, TAG, MATRIX): @@ -28,7 +24,7 @@ def generate_test_tasks(SSL, TAG, MATRIX): for distro_name, compiler, arch, sasl, auths, topologies, server_vers in MATRIX: tags = [TAG, 'test', distro_name, compiler, f'sasl-{sasl}', 'cse'] if distro_name == 'rhel8-latest': - test_distro = find_large_distro(distro_name) # DEVPROD-18763 + test_distro = 
find_large_distro(distro_name) # DEVPROD-18763 else: test_distro = find_small_distro(distro_name) diff --git a/.evergreen/config_generator/etc/distros.py b/.evergreen/config_generator/etc/distros.py index d4cb1c0c1fb..bef2f731700 100644 --- a/.evergreen/config_generator/etc/distros.py +++ b/.evergreen/config_generator/etc/distros.py @@ -1,7 +1,7 @@ from typing import Literal -from pydantic import BaseModel, validator from packaging.version import Version +from pydantic import BaseModel, validator class Distro(BaseModel): @@ -21,14 +21,17 @@ class Distro(BaseModel): os: str | None = None os_type: Literal['linux', 'macos', 'windows'] | None = None os_ver: str | None = None - vs_ver: Literal[ - '2013', - '2015', - '2017', - '2019', - '2022', - 'vsCurrent', - ] | None = None + vs_ver: ( + Literal[ + '2013', + '2015', + '2017', + '2019', + '2022', + 'vsCurrent', + ] + | None + ) = None size: Literal['small', 'large'] | None = None arch: Literal['arm64', 'power', 'zseries'] | None = None @@ -61,7 +64,6 @@ def ls_distro(name, **kwargs): RHEL_DISTROS = [ *ls_distro(name='rhel7-latest', os='rhel', os_type='linux', os_ver='7'), *ls_distro(name='rhel8-latest', os='rhel', os_type='linux', os_ver='8'), - *ls_distro(name='rhel80', os='rhel', os_type='linux', os_ver='8.0'), *ls_distro(name='rhel84', os='rhel', os_type='linux', os_ver='8.4'), *ls_distro(name='rhel90', os='rhel', os_type='linux', os_ver='9.0'), @@ -69,7 +71,7 @@ def ls_distro(name, **kwargs): *ls_distro(name='rhel92', os='rhel', os_type='linux', os_ver='9.2'), *ls_distro(name='rhel93', os='rhel', os_type='linux', os_ver='9.3'), *ls_distro(name='rhel94', os='rhel', os_type='linux', os_ver='9.4'), - *ls_distro(name='rhel95', os='rhel', os_type='linux', os_ver='9.5'), # rhel9-latest + *ls_distro(name='rhel95', os='rhel', os_type='linux', os_ver='9.5'), # rhel9-latest ] RHEL_POWER_DISTROS = [ @@ -150,14 +152,13 @@ def make_distro_str(distro_name, compiler, arch) -> str: # ('windows-vsCurrent-2022', 'mingw', None) -> 
windows-2022-mingw # ('windows-vsCurrent', 'vs2017x64', None) -> windows-2019-vs2017-x64 # ('windows-vsCurrent', 'mingw', None) -> windows-2019-mingw - maybe_arch = compiler[len('vs20XY'):] + maybe_arch = compiler[len('vs20XY') :] if maybe_arch in ('x86', 'x64'): - compiler_str = compiler[:-len(maybe_arch)] + '-' + maybe_arch + compiler_str = compiler[: -len(maybe_arch)] + '-' + maybe_arch else: compiler_str = compiler if distro_name.startswith('windows-vsCurrent-'): - distro_str = 'windows-' + \ - distro_name[len('windows-vsCurrent-'):] + f'-{compiler_str}' + distro_str = 'windows-' + distro_name[len('windows-vsCurrent-') :] + f'-{compiler_str}' else: distro_str = 'windows-2019-' + compiler_str else: diff --git a/.evergreen/config_generator/etc/function.py b/.evergreen/config_generator/etc/function.py index 61398dced5d..aa9ec13da5e 100644 --- a/.evergreen/config_generator/etc/function.py +++ b/.evergreen/config_generator/etc/function.py @@ -1,9 +1,7 @@ -from typing import ClassVar -from typing import Mapping from collections import ChainMap +from typing import ClassVar, Mapping -from shrub.v3.evg_command import EvgCommand -from shrub.v3.evg_command import FunctionCall +from shrub.v3.evg_command import EvgCommand, FunctionCall class Function: diff --git a/.evergreen/config_generator/etc/sanitizers/test.py b/.evergreen/config_generator/etc/sanitizers/test.py index 2dad2d4279d..44310d1b2c1 100644 --- a/.evergreen/config_generator/etc/sanitizers/test.py +++ b/.evergreen/config_generator/etc/sanitizers/test.py @@ -1,20 +1,16 @@ from itertools import product -from shrub.v3.evg_command import expansions_update -from shrub.v3.evg_command import KeyValueParam +from shrub.v3.evg_command import KeyValueParam, expansions_update from shrub.v3.evg_task import EvgTask, EvgTaskDependency -from config_generator.etc.distros import find_large_distro, find_small_distro -from config_generator.etc.distros import make_distro_str -from config_generator.etc.distros import compiler_to_vars 
- from config_generator.components.funcs.bootstrap_mongo_orchestration import BootstrapMongoOrchestration from config_generator.components.funcs.fetch_build import FetchBuild from config_generator.components.funcs.fetch_det import FetchDET from config_generator.components.funcs.install_uv import InstallUV -from config_generator.components.funcs.run_simple_http_server import RunSimpleHTTPServer from config_generator.components.funcs.run_mock_kms_servers import RunMockKMSServers +from config_generator.components.funcs.run_simple_http_server import RunSimpleHTTPServer from config_generator.components.funcs.run_tests import RunTests +from config_generator.etc.distros import compiler_to_vars, find_large_distro, find_small_distro, make_distro_str def generate_test_tasks(SSL, TAG, MATRIX, MORE_COMPILE_TAGS=None, MORE_TEST_TAGS=None, MORE_VARS=None): @@ -31,12 +27,10 @@ def generate_test_tasks(SSL, TAG, MATRIX, MORE_COMPILE_TAGS=None, MORE_TEST_TAGS MORE_VARS = MORE_VARS if MORE_VARS else {} for distro_name, compiler, arch, sasl, auths, topologies, server_vers in MATRIX: - tags = [ - TAG, 'test', distro_name, compiler, f'sasl-{sasl}' - ] + MORE_COMPILE_TAGS + tags = [TAG, 'test', distro_name, compiler, f'sasl-{sasl}'] + MORE_COMPILE_TAGS if distro_name == 'rhel8-latest': - test_distro = find_large_distro(distro_name) # DEVPROD-18763 + test_distro = find_large_distro(distro_name) # DEVPROD-18763 else: test_distro = find_small_distro(distro_name) @@ -78,9 +72,7 @@ def generate_test_tasks(SSL, TAG, MATRIX, MORE_COMPILE_TAGS=None, MORE_TEST_TAGS ] if 'cse' in MORE_COMPILE_TAGS: - updates.append( - KeyValueParam(key='CLIENT_SIDE_ENCRYPTION', value='on') - ) + updates.append(KeyValueParam(key='CLIENT_SIDE_ENCRYPTION', value='on')) for key, value in MORE_VARS.items(): updates.append(KeyValueParam(key=key, value=value)) diff --git a/.evergreen/config_generator/etc/sasl/compile.py b/.evergreen/config_generator/etc/sasl/compile.py index f3246728010..05846e88595 100644 --- 
a/.evergreen/config_generator/etc/sasl/compile.py +++ b/.evergreen/config_generator/etc/sasl/compile.py @@ -1,11 +1,9 @@ from typing import ClassVar -from shrub.v3.evg_command import EvgCommand -from shrub.v3.evg_command import EvgCommandType - -from config_generator.etc.utils import bash_exec +from shrub.v3.evg_command import EvgCommand, EvgCommandType from config_generator.etc.function import Function +from config_generator.etc.utils import bash_exec class CompileCommon(Function): diff --git a/.evergreen/config_generator/etc/sasl/test.py b/.evergreen/config_generator/etc/sasl/test.py index 33cad9da182..f10473c7a4f 100644 --- a/.evergreen/config_generator/etc/sasl/test.py +++ b/.evergreen/config_generator/etc/sasl/test.py @@ -1,19 +1,15 @@ from itertools import product -from shrub.v3.evg_command import expansions_update -from shrub.v3.evg_command import KeyValueParam +from shrub.v3.evg_command import KeyValueParam, expansions_update from shrub.v3.evg_task import EvgTask, EvgTaskDependency -from config_generator.etc.distros import find_large_distro, find_small_distro -from config_generator.etc.distros import make_distro_str -from config_generator.etc.distros import compiler_to_vars - from config_generator.components.funcs.bootstrap_mongo_orchestration import BootstrapMongoOrchestration from config_generator.components.funcs.fetch_build import FetchBuild from config_generator.components.funcs.fetch_det import FetchDET from config_generator.components.funcs.install_uv import InstallUV from config_generator.components.funcs.run_simple_http_server import RunSimpleHTTPServer from config_generator.components.funcs.run_tests import RunTests +from config_generator.etc.distros import compiler_to_vars, find_large_distro, find_small_distro, make_distro_str def generate_test_tasks(SSL, TAG, MATRIX): @@ -28,7 +24,7 @@ def generate_test_tasks(SSL, TAG, MATRIX): for distro_name, compiler, arch, sasl, auths, topologies, server_vers in MATRIX: tags = [TAG, 'test', distro_name, 
compiler] if distro_name == 'rhel8-latest': - test_distro = find_large_distro(distro_name) # DEVPROD-18763 + test_distro = find_large_distro(distro_name) # DEVPROD-18763 else: test_distro = find_small_distro(distro_name) diff --git a/.evergreen/config_generator/etc/utils.py b/.evergreen/config_generator/etc/utils.py index 0dd18327883..3a972d996dd 100644 --- a/.evergreen/config_generator/etc/utils.py +++ b/.evergreen/config_generator/etc/utils.py @@ -3,14 +3,13 @@ from inspect import isclass from pathlib import Path from textwrap import dedent -from typing import (Any, Iterable, Literal, Mapping, Type, TypeVar, - Union, cast) +from typing import Any, Iterable, Literal, Mapping, Type, TypeVar, Union, cast import yaml from shrub.v3.evg_command import EvgCommandType, subprocess_exec from shrub.v3.evg_project import EvgProject -from shrub.v3.shrub_service import ConfigDumper from shrub.v3.evg_task import EvgTaskRef +from shrub.v3.shrub_service import ConfigDumper from typing_extensions import get_args, get_origin, get_type_hints T = TypeVar('T') @@ -39,8 +38,8 @@ def bash_exec( **kwargs, ): ret = subprocess_exec( - binary="bash", - args=["-c", dedent(script)], + binary='bash', + args=['-c', dedent(script)], include_expansions_in_env=list(include_expansions_in_env) if include_expansions_in_env else None, working_dir=working_dir, command_type=command_type, @@ -49,7 +48,7 @@ def bash_exec( ) if retry_on_failure is not None: - ret.params |= {"retry_on_failure": retry_on_failure} + ret.params |= {'retry_on_failure': retry_on_failure} return ret @@ -75,7 +74,7 @@ def all_components(): # Helper function to print component name for diagnostic purposes. def component_name(component): component_prefix = 'config_generator.components.' 
- res = component.__name__[len(component_prefix):] + res = component.__name__[len(component_prefix) :] return res @@ -118,13 +117,9 @@ def represent_mapping(self, tag, mapping, flow_style=False): 'args', ] - ordered = { - field: mapping.pop(field) for field in before if field in mapping - } + ordered = {field: mapping.pop(field) for field in before if field in mapping} - suffix = { - field: mapping.pop(field) for field in after if field in mapping - } + suffix = {field: mapping.pop(field) for field in after if field in mapping} ordered.update(sorted(mapping.items())) ordered.update(suffix) @@ -177,6 +172,4 @@ def all_possible(typ: Type[T]) -> Iterable[T]: # Reconstruct as a NamedTuple: yield typ(**items) # type: ignore else: - raise TypeError( - f'Do not know how to do "all_possible" of type {typ!r} ({origin=})' - ) + raise TypeError(f'Do not know how to do "all_possible" of type {typ!r} ({origin=})') diff --git a/.evergreen/config_generator/generate.py b/.evergreen/config_generator/generate.py index f32b79ef046..6b4d3cf7e52 100644 --- a/.evergreen/config_generator/generate.py +++ b/.evergreen/config_generator/generate.py @@ -4,16 +4,14 @@ import sys - from importlib import import_module - GENERATOR_NAMES = [ - "functions", - "tasks", - "task_groups", - "variants", - "legacy_config", + 'functions', + 'tasks', + 'task_groups', + 'variants', + 'legacy_config', ] @@ -23,11 +21,11 @@ def main(): assert sys.version_info.minor >= 10 for name in GENERATOR_NAMES: - m = import_module(f"config_generator.generators.{name}") - print(f"Running {name}.generate()...") + m = import_module(f'config_generator.generators.{name}') + print(f'Running {name}.generate()...') m.generate() - print(f"Running {name}.generate()... done.") + print(f'Running {name}.generate()... 
done.') -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/.evergreen/generated_configs/functions.yml b/.evergreen/generated_configs/functions.yml index 1f603727854..45987fbabbc 100644 --- a/.evergreen/generated_configs/functions.yml +++ b/.evergreen/generated_configs/functions.yml @@ -760,7 +760,6 @@ functions: params: binary: bash working_dir: mongoc - silent: true args: - -c - | diff --git a/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py b/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py index b5bb4aa3e0f..f6394b85b8a 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py +++ b/.evergreen/legacy_config_generator/evergreen_config_generator/__init__.py @@ -13,40 +13,38 @@ # limitations under the License. -import sys from collections import OrderedDict as OD from typing import Any, Iterable, Mapping, MutableMapping, MutableSequence, Sequence, Union +import yaml +import yamlloader + Scalar = Union[str, bool, int, None, float] -"YAML simple schema scalar types" -ValueSequence = Sequence["Value"] -"Sequence of YAML simple values" -MutableValueArray = MutableSequence["Value"] -"A mutable sequence of JSON values" -ValueMapping = Mapping[Scalar, "Value"] -"A YAML mapping type (arbitrary scalars as keys)" -MutableValueMapping = MutableMapping[Scalar, "Value"] -"A mutable YAML mapping type" +'YAML simple schema scalar types' +ValueSequence = Sequence['Value'] +'Sequence of YAML simple values' +MutableValueArray = MutableSequence['Value'] +'A mutable sequence of JSON values' +ValueMapping = Mapping[Scalar, 'Value'] +'A YAML mapping type (arbitrary scalars as keys)' +MutableValueMapping = MutableMapping[Scalar, 'Value'] +'A mutable YAML mapping type' Value = Union[ValueSequence, ValueMapping, Scalar] -"Any YAML simple value" +'Any YAML simple value' MutableValue = Union[MutableValueMapping, MutableValueArray, Scalar] -"Any YAML simple value, which may be a mutable 
sequence or map" +'Any YAML simple value, which may be a mutable sequence or map' ValueOrderedDict = OD[Scalar, Value] -"An OrderedDict of YAML values" - - -import yaml -import yamlloader +'An OrderedDict of YAML values' class ConfigObject(object): @property def name(self) -> str: - return "UNSET" + return 'UNSET' def to_dict(self) -> Value: - return OD([("name", self.name)]) + return OD([('name', self.name)]) # We want legible YAML tasks: @@ -72,8 +70,8 @@ def __init__(self, *args: Value, **kwargs: Value): self.add_multi_representer(ConfigObject, type(self).represent_config_object) def represent_scalar(self, tag: str, value: Value, style: str | None = None) -> yaml.ScalarNode: - if isinstance(value, (str)) and "\n" in value: - style = "|" + if isinstance(value, (str)) and '\n' in value: + style = '|' return super().represent_scalar(tag, value, style) # type: ignore def represent_set(self, data: Iterable[Value]) -> yaml.MappingNode: @@ -92,7 +90,7 @@ def generate(config: Any, path: str): """Dump config to a file as YAML. config is a dict, preferably an OrderedDict. path is a file path. """ - f = open(path, "w+") + f = open(path, 'w+') f.write( """#################################### # Evergreen configuration diff --git a/.evergreen/legacy_config_generator/evergreen_config_generator/functions.py b/.evergreen/legacy_config_generator/evergreen_config_generator/functions.py index 75d0ab0c229..99a05f0f064 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_generator/functions.py +++ b/.evergreen/legacy_config_generator/evergreen_config_generator/functions.py @@ -18,33 +18,33 @@ from evergreen_config_generator import ConfigObject -from . import Value, MutableValueMapping, ValueMapping, ValueOrderedDict +from . 
import MutableValueMapping, Value, ValueMapping, ValueOrderedDict def func(func_name: str, **kwargs: Value) -> MutableValueMapping: - od: MutableValueMapping = OD([("func", func_name)]) + od: MutableValueMapping = OD([('func', func_name)]) if kwargs: - od["vars"] = OD(sorted(kwargs.items())) + od['vars'] = OD(sorted(kwargs.items())) return od def s3_put(remote_file: str, project_path: bool = True, **kwargs: Value) -> ValueMapping: if project_path: - remote_file = "${project}/" + remote_file + remote_file = '${project}/' + remote_file return ValueOrderedDict( [ - ("command", "s3.put"), + ('command', 's3.put'), ( - "params", + 'params', ValueOrderedDict( ( - ("aws_key", "${aws_key}"), - ("aws_secret", "${aws_secret}"), - ("remote_file", remote_file), - ("bucket", "mciuploads"), - ("permissions", "public-read"), + ('aws_key', '${aws_key}'), + ('aws_secret', '${aws_secret}'), + ('remote_file', remote_file), + ('bucket', 'mciuploads'), + ('permissions', 'public-read'), *kwargs.items(), ) ), @@ -54,7 +54,7 @@ def s3_put(remote_file: str, project_path: bool = True, **kwargs: Value) -> Valu def strip_lines(s: str) -> str: - return "\n".join(line for line in s.split("\n") if line.strip()) + return '\n'.join(line for line in s.split('\n') if line.strip()) def shell_exec( @@ -70,50 +70,50 @@ def shell_exec( redirect_standard_error_to_output: bool = False, include_expansions_in_env: Iterable[str] = (), ) -> ValueMapping: - dedented = "" + dedented = '' if errexit: - dedented += "set -o errexit\n" + dedented += 'set -o errexit\n' if xtrace: - dedented += "set -o xtrace\n" + dedented += 'set -o xtrace\n' dedented += dedent(strip_lines(script)) - command = ValueOrderedDict([("command", "shell.exec")]) + command = ValueOrderedDict([('command', 'shell.exec')]) if test: - command["type"] = "test" + command['type'] = 'test' - command["params"] = OD() + command['params'] = OD() if silent: - command["params"]["silent"] = True + command['params']['silent'] = True if working_dir is not 
None: - command["params"]["working_dir"] = working_dir + command['params']['working_dir'] = working_dir if continue_on_err: - command["params"]["continue_on_err"] = True + command['params']['continue_on_err'] = True if background: - command["params"]["background"] = True + command['params']['background'] = True if add_expansions_to_env: - command["params"]["add_expansions_to_env"] = True + command['params']['add_expansions_to_env'] = True if redirect_standard_error_to_output: - command["params"]["redirect_standard_error_to_output"] = True + command['params']['redirect_standard_error_to_output'] = True if include_expansions_in_env: - command["params"]["include_expansions_in_env"] = list(include_expansions_in_env) + command['params']['include_expansions_in_env'] = list(include_expansions_in_env) - command["params"]["shell"] = "bash" - command["params"]["script"] = dedented + command['params']['shell'] = 'bash' + command['params']['script'] = dedented return command def targz_pack(target: str, source_dir: str, *include: str) -> ValueMapping: return OD( [ - ("command", "archive.targz_pack"), - ("params", OD([("target", target), ("source_dir", source_dir), ("include", list(include))])), + ('command', 'archive.targz_pack'), + ('params', OD([('target', target), ('source_dir', source_dir), ('include', list(include))])), ] ) diff --git a/.evergreen/legacy_config_generator/evergreen_config_generator/taskgroups.py b/.evergreen/legacy_config_generator/evergreen_config_generator/taskgroups.py index 47263e3b83b..cd96e745edd 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_generator/taskgroups.py +++ b/.evergreen/legacy_config_generator/evergreen_config_generator/taskgroups.py @@ -13,6 +13,7 @@ # limitations under the License. from typing import MutableMapping + from evergreen_config_generator import ConfigObject from . 
import Value, ValueSequence @@ -42,16 +43,16 @@ def to_dict(self) -> Value: # See possible TaskGroup attributes from the Evergreen wiki: # https://github.com/evergreen-ci/evergreen/wiki/Project-Configuration-Files#task-groups attrs = [ - "setup_group", - "teardown_group", - "setup_task", - "teardown_task", - "max_hosts", - "timeout", - "setup_group_can_fail_task", - "setup_group_timeout_secs", - "share_processes", - "tasks", + 'setup_group', + 'teardown_group', + 'setup_task', + 'teardown_task', + 'max_hosts', + 'timeout', + 'setup_group_can_fail_task', + 'setup_group_timeout_secs', + 'share_processes', + 'tasks', ] for i in attrs: diff --git a/.evergreen/legacy_config_generator/evergreen_config_generator/tasks.py b/.evergreen/legacy_config_generator/evergreen_config_generator/tasks.py index 3ea8072591d..01824659772 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_generator/tasks.py +++ b/.evergreen/legacy_config_generator/evergreen_config_generator/tasks.py @@ -12,17 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections import OrderedDict as OD import copy -from itertools import chain, product import itertools +from collections import OrderedDict as OD +from itertools import chain, product from typing import ClassVar, Iterable, Literal, Mapping, MutableMapping, Sequence, Union from evergreen_config_generator import ConfigObject from evergreen_config_generator.functions import func -from . import Value, MutableValueMapping, ValueSequence - +from . 
import MutableValueMapping, Value, ValueSequence DependencySpec = Union[str, Mapping[str, Value]] @@ -44,7 +43,7 @@ def __init__( self._depends_on = list(map(self._normal_dep, depends_on)) if exec_timeout_secs is not None: - self.options["exec_timeout_secs"] = exec_timeout_secs + self.options['exec_timeout_secs'] = exec_timeout_secs @property def dependencies(self) -> Sequence[Mapping[str, Value]]: @@ -54,7 +53,7 @@ def dependencies(self) -> Sequence[Mapping[str, Value]]: def _normal_dep(self, spec: DependencySpec) -> Mapping[str, Value]: if isinstance(spec, str): - return OD([("name", spec)]) + return OD([('name', spec)]) return spec @property @@ -83,7 +82,7 @@ def additional_tags(self) -> Iterable[str]: def add_dependency(self, dependency: DependencySpec): if isinstance(dependency, str): - dependency = OD([("name", dependency)]) + dependency = OD([('name', dependency)]) self._depends_on.append(dependency) @@ -91,15 +90,15 @@ def to_dict(self): task: MutableValueMapping = super().to_dict() # type: ignore assert isinstance(task, MutableMapping) if self.tags: - task["tags"] = list(self.tags) + task['tags'] = list(self.tags) task.update(self.options) deps: Sequence[MutableValueMapping] = list(self.dependencies) # type: ignore if deps: if len(deps) == 1: - task["depends_on"] = OD(deps[0]) + task['depends_on'] = OD(deps[0]) else: - task["depends_on"] = copy.deepcopy(deps) - task["commands"] = list( + task['depends_on'] = copy.deepcopy(deps) + task['commands'] = list( itertools.chain( self.pre_commands(), self.main_commands(), @@ -149,7 +148,7 @@ def both_or_neither(rule0: bool, rule1: bool) -> None: class SettingsAccess: - def __init__(self, inst: "MatrixTask") -> None: + def __init__(self, inst: 'MatrixTask') -> None: self._task = inst def __getattr__(self, __setting: str) -> str | bool: @@ -170,18 +169,18 @@ def display(self, axis_name: str) -> str: value = self.setting_value(axis_name) if value is False: # E.g., if self.auth is False, return 'noauth'. 
- return f"no{axis_name}" + return f'no{axis_name}' elif value is True: return axis_name else: return value - def on_off(self, key: str, val: str) -> Literal["on", "off"]: - return "on" if self.setting_value(key) == val else "off" + def on_off(self, key: str, val: str) -> Literal['on', 'off']: + return 'on' if self.setting_value(key) == val else 'off' @property def name(self) -> str: - return "-".join(self.name_parts()) + return '-'.join(self.name_parts()) def name_parts(self) -> Iterable[str]: raise NotImplementedError @@ -191,24 +190,24 @@ def settings(self) -> SettingsAccess: return SettingsAccess(self) def setting_value(self, axis: str) -> str | bool: - assert ( - axis in type(self).axes.keys() - ), f'Attempted to inspect setting "{axis}", which is not defined for this task type' + assert axis in type(self).axes.keys(), ( + f'Attempted to inspect setting "{axis}", which is not defined for this task type' + ) return self._settings[axis] def setting_eq(self, axis: str, val: str | bool) -> bool: current = self.setting_value(axis) options = type(self).axes[axis] - assert ( - val in options - ), f'Looking for value "{val}" on setting "{axis}", but that is not a supported option (Expects one of {options})' + assert val in options, ( + f'Looking for value "{val}" on setting "{axis}", but that is not a supported option (Expects one of {options})' + ) return current == val def is_valid_combination(self) -> bool: try: return self.do_is_valid_combination() except Prohibited: - print(f"Ignoring invalid combination {self.name!r}") + print(f'Ignoring invalid combination {self.name!r}') return False def do_is_valid_combination(self) -> bool: diff --git a/.evergreen/legacy_config_generator/evergreen_config_generator/variants.py b/.evergreen/legacy_config_generator/evergreen_config_generator/variants.py index 3d328b95dcb..722919b0b6a 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_generator/variants.py +++ 
b/.evergreen/legacy_config_generator/evergreen_config_generator/variants.py @@ -13,6 +13,7 @@ # limitations under the License. from typing import Iterable, Mapping + from evergreen_config_generator import ConfigObject from . import ValueMapping @@ -48,7 +49,7 @@ def name(self): def to_dict(self): v = super(Variant, self).to_dict() - for i in "display_name", "expansions", "run_on", "tasks", "patchable", "batchtime", "tags", "display_tasks": + for i in 'display_name', 'expansions', 'run_on', 'tasks', 'patchable', 'batchtime', 'tags', 'display_tasks': attr = getattr(self, i) # Allow `False`, but ignore empty lists and dicts. diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/__init__.py b/.evergreen/legacy_config_generator/evergreen_config_lib/__init__.py index 1841636e15d..5634cb5f855 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/__init__.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/__init__.py @@ -13,6 +13,7 @@ # limitations under the License. 
from typing import Iterable + from evergreen_config_generator.functions import shell_exec @@ -30,7 +31,7 @@ def shell_mongoc( ): return shell_exec( script, - working_dir="mongoc", + working_dir='mongoc', test=test, errexit=errexit, xtrace=xtrace, diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py b/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py index 6de7c75a93e..b8175fcdd1d 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/functions.py @@ -14,158 +14,240 @@ from collections import OrderedDict as OD -from evergreen_config_generator.functions import ( - Function, s3_put, shell_exec) +from evergreen_config_generator.functions import Function, s3_put + from evergreen_config_lib import shell_mongoc build_path = '${build_variant}/${revision}/${version_id}/${build_id}' -all_functions = OD([ - ('install ssl', Function( - shell_mongoc(r''' - .evergreen/scripts/install-ssl.sh - ''', test=False, add_expansions_to_env=True), - )), - ('upload coverage', Function( - shell_mongoc(r''' - export AWS_ACCESS_KEY_ID=${aws_key} - export AWS_SECRET_ACCESS_KEY=${aws_secret} - aws s3 cp coverage s3://mciuploads/${project}/%s/coverage/ --recursive --acl public-read --region us-east-1 - ''' % (build_path,), test=False, silent=True), - s3_put(build_path + '/coverage/index.html', aws_key='${aws_key}', - aws_secret='${aws_secret}', - local_file='mongoc/coverage/index.html', bucket='mciuploads', - permissions='public-read', content_type='text/html', - display_name='Coverage Report'), - )), - ('upload scan artifacts', Function( - shell_mongoc(r''' - if find scan -name \*.html | grep -q html; then - (cd scan && find . -name index.html -exec echo "
  • {}
  • " \;) >> scan.html - else - echo "No issues found" > scan.html - fi - '''), - shell_mongoc(r''' - export AWS_ACCESS_KEY_ID=${aws_key} - export AWS_SECRET_ACCESS_KEY=${aws_secret} - aws s3 cp scan s3://mciuploads/${project}/%s/scan/ --recursive --acl public-read --region us-east-1 - ''' % (build_path,), test=False, silent=True), - s3_put(build_path + '/scan/index.html', aws_key='${aws_key}', - aws_secret='${aws_secret}', local_file='mongoc/scan.html', - bucket='mciuploads', permissions='public-read', - content_type='text/html', display_name='Scan Build Report'), - )), - # Use "silent=True" to hide output since errors may contain credentials. - ('run auth tests', Function( - shell_mongoc(r''' - .evergreen/scripts/run-auth-tests.sh - ''', add_expansions_to_env=True), - )), - ('link sample program', Function( - shell_mongoc(r''' - # Compile a program that links dynamically or statically to libmongoc, - # using variables from pkg-config or CMake's find_package command. - export BUILD_SAMPLE_WITH_CMAKE=${BUILD_SAMPLE_WITH_CMAKE} - export ENABLE_SSL=${ENABLE_SSL} - export ENABLE_SNAPPY=${ENABLE_SNAPPY} - PATH="${UV_INSTALL_DIR}:$PATH" - LINK_STATIC= .evergreen/scripts/link-sample-program.sh - LINK_STATIC=1 .evergreen/scripts/link-sample-program.sh - ''', - include_expansions_in_env=['distro_id', 'UV_INSTALL_DIR']), - )), - ('link sample program bson', Function( - shell_mongoc(r''' - # Compile a program that links dynamically or statically to libbson, - # using variables from pkg-config or from CMake's find_package command. 
- PATH="${UV_INSTALL_DIR}:$PATH" - BUILD_SAMPLE_WITH_CMAKE= LINK_STATIC= .evergreen/scripts/link-sample-program-bson.sh - BUILD_SAMPLE_WITH_CMAKE= LINK_STATIC=1 .evergreen/scripts/link-sample-program-bson.sh - BUILD_SAMPLE_WITH_CMAKE=1 LINK_STATIC= .evergreen/scripts/link-sample-program-bson.sh - BUILD_SAMPLE_WITH_CMAKE=1 LINK_STATIC=1 .evergreen/scripts/link-sample-program-bson.sh - ''', - include_expansions_in_env=['distro_id', 'UV_INSTALL_DIR']), - )), - ('link sample program MSVC', Function( - shell_mongoc(r''' - # Build libmongoc with CMake and compile a program that links - # dynamically or statically to it, using variables from CMake's - # find_package command. - export ENABLE_SSL=${ENABLE_SSL} - export ENABLE_SNAPPY=${ENABLE_SNAPPY} - PATH="${UV_INSTALL_DIR}:$PATH" - LINK_STATIC= cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc.cmd - LINK_STATIC=1 cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc.cmd - ''') - )), - ('link sample program mingw', Function( - shell_mongoc(r''' - # Build libmongoc with CMake and compile a program that links - # dynamically to it, using variables from pkg-config.exe. - PATH="${UV_INSTALL_DIR}:$PATH" - cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw.cmd - ''') - )), - ('link sample program MSVC bson', Function( - shell_mongoc(r''' - # Build libmongoc with CMake and compile a program that links - # dynamically or statically to it, using variables from CMake's - # find_package command. - export ENABLE_SSL=${ENABLE_SSL} - export ENABLE_SNAPPY=${ENABLE_SNAPPY} - PATH="${UV_INSTALL_DIR}:$PATH" - LINK_STATIC= cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc-bson.cmd - LINK_STATIC=1 cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc-bson.cmd - ''') - )), - ('link sample program mingw bson', Function( - shell_mongoc(r''' - # Build libmongoc with CMake and compile a program that links - # dynamically to it, using variables from pkg-config.exe. 
- PATH="${UV_INSTALL_DIR}:$PATH" - cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw-bson.cmd - ''') - )), - ('update codecov.io', Function( - shell_mongoc(r''' - # Note: coverage is currently only enabled on the ubuntu1804 distro. - # This script does not support MacOS, Windows, or non-x86_64 distros. - # Update accordingly if code coverage is expanded to other distros. - curl -Os https://uploader.codecov.io/latest/linux/codecov - chmod +x codecov - # -Z: Exit with a non-zero value if error. - # -g: Run with gcov support. - # -t: Codecov upload token. - # perl: filter verbose "Found" list and "Processing" messages. - ./codecov -Zgt "${codecov_token}" | perl -lne 'print if not m|^.*\.gcov(\.\.\.)?$|' - ''', test=False), - )), - ('compile coverage', Function( - shell_mongoc(r''' - COVERAGE=ON .evergreen/scripts/compile.sh - ''', add_expansions_to_env=True), - )), - ('run aws tests', Function( - # Assume role to get AWS secrets. - { - "command": "ec2.assume_role", - "params": { - "role_arn": "${aws_test_secrets_role}" - } - }, - - shell_mongoc(r''' - pushd ../drivers-evergreen-tools/.evergreen/auth_aws - ./setup_secrets.sh drivers/aws_auth - popd # ../drivers-evergreen-tools/.evergreen/auth_aws - ''', include_expansions_in_env=["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"]), - - shell_mongoc(r''' - pushd ../drivers-evergreen-tools/.evergreen/auth_aws - . 
./activate-authawsvenv.sh - popd # ../drivers-evergreen-tools/.evergreen/auth_aws - .evergreen/scripts/run-aws-tests.sh - ''', include_expansions_in_env=["TESTCASE"]) - )), -]) +all_functions = OD( + [ + ( + 'install ssl', + Function( + shell_mongoc( + '.evergreen/scripts/install-ssl.sh', + test=False, + add_expansions_to_env=True, + ), + ), + ), + ( + 'upload coverage', + Function( + shell_mongoc( + r""" + export AWS_ACCESS_KEY_ID=${aws_key} + export AWS_SECRET_ACCESS_KEY=${aws_secret} + aws s3 cp coverage s3://mciuploads/${project}/%s/coverage/ --recursive --acl public-read --region us-east-1 + """ + % (build_path,), + test=False, + silent=True, + ), + s3_put( + build_path + '/coverage/index.html', + aws_key='${aws_key}', + aws_secret='${aws_secret}', + local_file='mongoc/coverage/index.html', + bucket='mciuploads', + permissions='public-read', + content_type='text/html', + display_name='Coverage Report', + ), + ), + ), + ( + 'upload scan artifacts', + Function( + shell_mongoc( + r""" + if find scan -name \*.html | grep -q html; then + (cd scan && find . -name index.html -exec echo "
  • {}
  • " \;) >> scan.html + else + echo "No issues found" > scan.html + fi + """, + ), + shell_mongoc( + r""" + export AWS_ACCESS_KEY_ID=${aws_key} + export AWS_SECRET_ACCESS_KEY=${aws_secret} + aws s3 cp scan s3://mciuploads/${project}/%s/scan/ --recursive --acl public-read --region us-east-1 + """ + % (build_path,), + test=False, + silent=True, + ), + s3_put( + build_path + '/scan/index.html', + aws_key='${aws_key}', + aws_secret='${aws_secret}', + local_file='mongoc/scan.html', + bucket='mciuploads', + permissions='public-read', + content_type='text/html', + display_name='Scan Build Report', + ), + ), + ), + # Use "silent=True" to hide output since errors may contain credentials. + ( + 'run auth tests', + Function( + shell_mongoc( + '.evergreen/scripts/run-auth-tests.sh', + add_expansions_to_env=True, + ), + ), + ), + ( + 'link sample program', + Function( + shell_mongoc( + r""" + # Compile a program that links dynamically or statically to libmongoc, + # using variables from pkg-config or CMake's find_package command. + export BUILD_SAMPLE_WITH_CMAKE=${BUILD_SAMPLE_WITH_CMAKE} + export ENABLE_SSL=${ENABLE_SSL} + export ENABLE_SNAPPY=${ENABLE_SNAPPY} + PATH="${UV_INSTALL_DIR}:$PATH" + LINK_STATIC= .evergreen/scripts/link-sample-program.sh + LINK_STATIC=1 .evergreen/scripts/link-sample-program.sh + """, + include_expansions_in_env=['distro_id', 'UV_INSTALL_DIR'], + ), + ), + ), + ( + 'link sample program bson', + Function( + shell_mongoc( + r""" + # Compile a program that links dynamically or statically to libbson, + # using variables from pkg-config or from CMake's find_package command. 
+ PATH="${UV_INSTALL_DIR}:$PATH" + BUILD_SAMPLE_WITH_CMAKE= LINK_STATIC= .evergreen/scripts/link-sample-program-bson.sh + BUILD_SAMPLE_WITH_CMAKE= LINK_STATIC=1 .evergreen/scripts/link-sample-program-bson.sh + BUILD_SAMPLE_WITH_CMAKE=1 LINK_STATIC= .evergreen/scripts/link-sample-program-bson.sh + BUILD_SAMPLE_WITH_CMAKE=1 LINK_STATIC=1 .evergreen/scripts/link-sample-program-bson.sh + """, + include_expansions_in_env=['distro_id', 'UV_INSTALL_DIR'], + ), + ), + ), + ( + 'link sample program MSVC', + Function( + shell_mongoc( + r""" + # Build libmongoc with CMake and compile a program that links + # dynamically or statically to it, using variables from CMake's + # find_package command. + export ENABLE_SSL=${ENABLE_SSL} + export ENABLE_SNAPPY=${ENABLE_SNAPPY} + PATH="${UV_INSTALL_DIR}:$PATH" + LINK_STATIC= cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc.cmd + LINK_STATIC=1 cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc.cmd + """, + ) + ), + ), + ( + 'link sample program mingw', + Function( + shell_mongoc( + r""" + # Build libmongoc with CMake and compile a program that links + # dynamically to it, using variables from pkg-config.exe. + PATH="${UV_INSTALL_DIR}:$PATH" + cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw.cmd + """, + ) + ), + ), + ( + 'link sample program MSVC bson', + Function( + shell_mongoc( + r""" + # Build libmongoc with CMake and compile a program that links + # dynamically or statically to it, using variables from CMake's + # find_package command. 
+ export ENABLE_SSL=${ENABLE_SSL} + export ENABLE_SNAPPY=${ENABLE_SNAPPY} + PATH="${UV_INSTALL_DIR}:$PATH" + LINK_STATIC= cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc-bson.cmd + LINK_STATIC=1 cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-msvc-bson.cmd + """, + ) + ), + ), + ( + 'link sample program mingw bson', + Function( + shell_mongoc( + r""" + # Build libmongoc with CMake and compile a program that links + # dynamically to it, using variables from pkg-config.exe. + PATH="${UV_INSTALL_DIR}:$PATH" + cmd.exe /c .\\.evergreen\\scripts\\link-sample-program-mingw-bson.cmd + """, + ) + ), + ), + ( + 'update codecov.io', + Function( + shell_mongoc( + r""" + # Note: coverage is currently only enabled on the ubuntu1804 distro. + # This script does not support MacOS, Windows, or non-x86_64 distros. + # Update accordingly if code coverage is expanded to other distros. + curl -Os https://uploader.codecov.io/latest/linux/codecov + chmod +x codecov + # -Z: Exit with a non-zero value if error. + # -g: Run with gcov support. + # -t: Codecov upload token. + # perl: filter verbose "Found" list and "Processing" messages. + ./codecov -Zgt "${codecov_token}" | perl -lne 'print if not m|^.*\.gcov(\.\.\.)?$|' + """, + test=False, + ), + ), + ), + ( + 'compile coverage', + Function( + shell_mongoc( + 'COVERAGE=ON .evergreen/scripts/compile.sh', + add_expansions_to_env=True, + ), + ), + ), + ( + 'run aws tests', + Function( + # Assume role to get AWS secrets. + {'command': 'ec2.assume_role', 'params': {'role_arn': '${aws_test_secrets_role}'}}, + shell_mongoc( + r""" + pushd ../drivers-evergreen-tools/.evergreen/auth_aws + ./setup_secrets.sh drivers/aws_auth + popd # ../drivers-evergreen-tools/.evergreen/auth_aws + """, + include_expansions_in_env=['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SESSION_TOKEN'], + ), + shell_mongoc( + r""" + pushd ../drivers-evergreen-tools/.evergreen/auth_aws + . 
./activate-authawsvenv.sh + popd # ../drivers-evergreen-tools/.evergreen/auth_aws + .evergreen/scripts/run-aws-tests.sh + """, + include_expansions_in_env=['TESTCASE'], + ), + ), + ), + ] +) diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/taskgroups.py b/.evergreen/legacy_config_generator/evergreen_config_lib/taskgroups.py index 50cc79a4472..f0b8473b2a2 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/taskgroups.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/taskgroups.py @@ -13,6 +13,7 @@ # limitations under the License. from typing import Sequence + from evergreen_config_generator.taskgroups import TaskGroup all_task_groups: Sequence[TaskGroup] = [] diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py b/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py index fe4bd8e32d2..2a25d88b5cb 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/tasks.py @@ -14,28 +14,26 @@ from collections import OrderedDict as OD from itertools import chain -from typing import ClassVar, Iterable, Literal, Mapping, MutableMapping, MutableSequence, Optional, Sequence +from typing import ClassVar, Iterable, Literal, Mapping, MutableSequence, Optional, Sequence -from config_generator.components.funcs.install_uv import InstallUV - -from evergreen_config_generator import Value, Scalar +from evergreen_config_generator import Value from evergreen_config_generator.functions import func, s3_put from evergreen_config_generator.tasks import ( - both_or_neither, + DependencySpec, MatrixTask, NamedTask, + both_or_neither, prohibit, require, - Task, - DependencySpec, ) -from evergreen_config_lib import shell_mongoc from packaging.version import Version +from config_generator.components.funcs.install_uv import InstallUV +from evergreen_config_lib import shell_mongoc -ToggleStr = Literal["OFF", "ON"] +ToggleStr = 
Literal['OFF', 'ON'] OptToggleStr = Optional[ToggleStr] -TopologyStr = Literal["server"] +TopologyStr = Literal['server'] class CompileTask(NamedTask): @@ -47,21 +45,21 @@ def __init__( self, task_name: str, tags: Iterable[str] = (), - config: str = "debug", - compression: str | None = "default", + config: str = 'debug', + compression: str | None = 'default', suffix_commands: Iterable[Value] = (), depends_on: Iterable[DependencySpec] = (), prefix_commands: Iterable[Value] = (), - sanitize: Iterable[Literal["undefined", "address", "thread"]] = (), + sanitize: Iterable[Literal['undefined', 'address', 'thread']] = (), *, CFLAGS: str | None = None, LDFLAGS: str | None = None, EXTRA_CONFIGURE_FLAGS: str | None = None, - SSL: Literal["WINDOWS", "DARWIN", "OPENSSL", "OPENSSL_STATIC", "OFF", None] = None, + SSL: Literal['WINDOWS', 'DARWIN', 'OPENSSL', 'OPENSSL_STATIC', 'OFF', None] = None, ENABLE_SHM_COUNTERS: OptToggleStr = None, CHECK_LOG: OptToggleStr = None, TRACING: OptToggleStr = None, - SASL: Literal[None, "OFF", "AUTO", "CYRUS", "SSPI"] = None, + SASL: Literal[None, 'OFF', 'AUTO', 'CYRUS', 'SSPI'] = None, ENABLE_RDTSCP: OptToggleStr = None, SRV: OptToggleStr = None, ): @@ -73,38 +71,38 @@ def __init__( # Environment variables for .evergreen/scripts/compile.sh. 
self.compile_sh_opt: dict[str, str] = {} - if config != "debug": - assert config == "release" - self.compile_sh_opt["RELEASE"] = "ON" + if config != 'debug': + assert config == 'release' + self.compile_sh_opt['RELEASE'] = 'ON' if CFLAGS: - self.compile_sh_opt["CFLAGS"] = CFLAGS + self.compile_sh_opt['CFLAGS'] = CFLAGS if LDFLAGS: - self.compile_sh_opt["LDFLAGS"] = LDFLAGS + self.compile_sh_opt['LDFLAGS'] = LDFLAGS if EXTRA_CONFIGURE_FLAGS: - self.compile_sh_opt["EXTRA_CONFIGURE_FLAGS"] = EXTRA_CONFIGURE_FLAGS + self.compile_sh_opt['EXTRA_CONFIGURE_FLAGS'] = EXTRA_CONFIGURE_FLAGS if SSL: - self.compile_sh_opt["SSL"] = SSL + self.compile_sh_opt['SSL'] = SSL if ENABLE_SHM_COUNTERS: - self.compile_sh_opt["ENABLE_SHM_COUNTERS"] = ENABLE_SHM_COUNTERS + self.compile_sh_opt['ENABLE_SHM_COUNTERS'] = ENABLE_SHM_COUNTERS if CHECK_LOG: - self.compile_sh_opt["CHECK_LOG"] = CHECK_LOG + self.compile_sh_opt['CHECK_LOG'] = CHECK_LOG if TRACING: - self.compile_sh_opt["TRACING"] = TRACING + self.compile_sh_opt['TRACING'] = TRACING if SASL: - self.compile_sh_opt["SASL"] = SASL + self.compile_sh_opt['SASL'] = SASL if ENABLE_RDTSCP: - self.compile_sh_opt["ENABLE_RDTSCP"] = ENABLE_RDTSCP + self.compile_sh_opt['ENABLE_RDTSCP'] = ENABLE_RDTSCP if SRV: - self.compile_sh_opt["SRV"] = SRV + self.compile_sh_opt['SRV'] = SRV - if compression != "default": - self.compile_sh_opt["SNAPPY"] = "ON" if compression in ("all", "snappy") else "OFF" - self.compile_sh_opt["ZLIB"] = "BUNDLED" if compression in ("all", "zlib") else "OFF" - self.compile_sh_opt["ZSTD"] = "ON" if compression in ("all", "zstd") else "OFF" + if compression != 'default': + self.compile_sh_opt['SNAPPY'] = 'ON' if compression in ('all', 'snappy') else 'OFF' + self.compile_sh_opt['ZLIB'] = 'BUNDLED' if compression in ('all', 'zlib') else 'OFF' + self.compile_sh_opt['ZSTD'] = 'ON' if compression in ('all', 'zstd') else 'OFF' if sanitize: - self.compile_sh_opt["SANITIZE"] = ",".join(sanitize) + self.compile_sh_opt['SANITIZE'] = 
','.join(sanitize) self.compile_sh_opt.update(type(self).cls_compile_sh_env) @@ -113,20 +111,20 @@ def additional_script_env(self) -> Mapping[str, str]: def to_dict(self): task = super(CompileTask, self).to_dict() - commands = task["commands"] + commands = task['commands'] assert isinstance(commands, MutableSequence), task commands.extend(self.prefix_commands) - script = "env" + script = 'env' for opt, value in sorted(self.compile_sh_opt.items()): script += ' %s="%s"' % (opt, value) - script += " .evergreen/scripts/compile.sh" + script += ' .evergreen/scripts/compile.sh' commands.append(func(InstallUV.name)) commands.append(shell_mongoc(script, add_expansions_to_env=True)) - commands.append(func("upload-build")) + commands.append(func('upload-build')) commands.extend(self.suffix_commands) return task @@ -136,38 +134,38 @@ def additional_tags(self) -> Iterable[str]: class SpecialTask(CompileTask): - cls_tags: ClassVar[Sequence[str]] = ["special"] + cls_tags: ClassVar[Sequence[str]] = ['special'] class CompileWithClientSideEncryption(CompileTask): cls_compile_sh_env: ClassVar[Mapping[str, str]] = dict( # Compiling with ClientSideEncryption support requires linking against the library libmongocrypt. 
- COMPILE_LIBMONGOCRYPT="ON", - EXTRA_CONFIGURE_FLAGS="-DENABLE_PIC=ON", + COMPILE_LIBMONGOCRYPT='ON', + EXTRA_CONFIGURE_FLAGS='-DENABLE_PIC=ON', ) - cls_tags: ClassVar[Sequence[str]] = "client-side-encryption", "special" + cls_tags: ClassVar[Sequence[str]] = 'client-side-encryption', 'special' class CompileWithClientSideEncryptionAsan(CompileTask): cls_compile_sh_env: ClassVar[Mapping[str, str]] = dict( - CFLAGS="-fno-omit-frame-pointer", - COMPILE_LIBMONGOCRYPT="ON", - CHECK_LOG="ON", - PATH="/usr/lib/llvm-3.8/bin:$PATH", + CFLAGS='-fno-omit-frame-pointer', + COMPILE_LIBMONGOCRYPT='ON', + CHECK_LOG='ON', + PATH='/usr/lib/llvm-3.8/bin:$PATH', ) - cls_tags: ClassVar[Sequence[str]] = ["client-side-encryption"] - cls_sanitize: ClassVar[Sequence[str]] = ["address"] + cls_tags: ClassVar[Sequence[str]] = ['client-side-encryption'] + cls_sanitize: ClassVar[Sequence[str]] = ['address'] class LinkTask(NamedTask): def __init__( - self, task_name: str, suffix_commands: Iterable[Value], orchestration: Literal[True, False, "ssl"] = True + self, task_name: str, suffix_commands: Iterable[Value], orchestration: Literal[True, False, 'ssl'] = True ): - if orchestration == "ssl": + if orchestration == 'ssl': # Actual value of SSL does not matter here so long as it is not 'nossl'. 
- bootstrap_commands = [func("fetch-det"), func("bootstrap-mongo-orchestration", SSL="openssl")] + bootstrap_commands = [func('fetch-det'), func('bootstrap-mongo-orchestration', SSL='openssl')] elif orchestration: - bootstrap_commands = [func("fetch-det"), func("bootstrap-mongo-orchestration")] + bootstrap_commands = [func('fetch-det'), func('bootstrap-mongo-orchestration')] else: bootstrap_commands = [] @@ -181,106 +179,114 @@ def __init__( all_tasks = [ CompileTask( - "hardened-compile", - tags=["hardened"], + 'hardened-compile', + tags=['hardened'], compression=None, - CFLAGS="-fno-strict-overflow -D_FORTIFY_SOURCE=2 -fstack-protector-all -fPIE -O", - LDFLAGS="-pie -Wl,-z,relro -Wl,-z,now", + CFLAGS='-fno-strict-overflow -D_FORTIFY_SOURCE=2 -fstack-protector-all -fPIE -O', + LDFLAGS='-pie -Wl,-z,relro -Wl,-z,now', ), - CompileTask("debug-compile-compression-zlib", tags=["zlib", "compression"], compression="zlib"), - CompileTask("debug-compile-compression-snappy", tags=["snappy", "compression"], compression="snappy"), - CompileTask("debug-compile-compression-zstd", tags=["zstd", "compression"], compression="zstd"), - CompileTask("debug-compile-nosasl-nossl", tags=["debug-compile", "nosasl", "nossl"], SSL="OFF"), - CompileTask("debug-compile-lto", CFLAGS="-flto"), - CompileTask("debug-compile-lto-thin", CFLAGS="-flto=thin"), - CompileTask("debug-compile-no-counters", tags=["debug-compile", "no-counters"], ENABLE_SHM_COUNTERS="OFF"), - CompileTask("compile-tracing", TRACING="ON", CFLAGS="-Werror -Wno-cast-align"), - CompileTask("release-compile", config="release"), - CompileTask("debug-compile-nosasl-openssl", tags=["debug-compile", "nosasl", "openssl"], SSL="OPENSSL"), - CompileTask("debug-compile-nosasl-darwinssl", tags=["debug-compile", "nosasl", "darwinssl"], SSL="DARWIN"), - CompileTask("debug-compile-nosasl-winssl", tags=["debug-compile", "nosasl", "winssl"], SSL="WINDOWS"), - CompileTask("debug-compile-sasl-openssl", tags=["debug-compile", "sasl", 
"openssl"], SASL="AUTO", SSL="OPENSSL"), - CompileTask("debug-compile-sasl-darwinssl", tags=["debug-compile", "sasl", "darwinssl"], SASL="AUTO", SSL="DARWIN"), - CompileTask("debug-compile-rdtscp", ENABLE_RDTSCP="ON"), - CompileTask("debug-compile-sspi-winssl", tags=["debug-compile", "sspi", "winssl"], SASL="SSPI", SSL="WINDOWS"), - CompileTask("debug-compile-nosrv", tags=["debug-compile"], SRV="OFF"), - LinkTask("link-with-cmake", suffix_commands=[func("link sample program", BUILD_SAMPLE_WITH_CMAKE=1)]), + CompileTask('debug-compile-compression-zlib', tags=['zlib', 'compression'], compression='zlib'), + CompileTask('debug-compile-compression-snappy', tags=['snappy', 'compression'], compression='snappy'), + CompileTask('debug-compile-compression-zstd', tags=['zstd', 'compression'], compression='zstd'), + CompileTask('debug-compile-nosasl-nossl', tags=['debug-compile', 'nosasl', 'nossl'], SSL='OFF'), + CompileTask('debug-compile-lto', CFLAGS='-flto'), + CompileTask('debug-compile-lto-thin', CFLAGS='-flto=thin'), + CompileTask('debug-compile-no-counters', tags=['debug-compile', 'no-counters'], ENABLE_SHM_COUNTERS='OFF'), + CompileTask('compile-tracing', TRACING='ON', CFLAGS='-Werror -Wno-cast-align'), + CompileTask('release-compile', config='release'), + CompileTask('debug-compile-nosasl-openssl', tags=['debug-compile', 'nosasl', 'openssl'], SSL='OPENSSL'), + CompileTask('debug-compile-nosasl-darwinssl', tags=['debug-compile', 'nosasl', 'darwinssl'], SSL='DARWIN'), + CompileTask('debug-compile-nosasl-winssl', tags=['debug-compile', 'nosasl', 'winssl'], SSL='WINDOWS'), + CompileTask('debug-compile-sasl-openssl', tags=['debug-compile', 'sasl', 'openssl'], SASL='AUTO', SSL='OPENSSL'), + CompileTask('debug-compile-sasl-darwinssl', tags=['debug-compile', 'sasl', 'darwinssl'], SASL='AUTO', SSL='DARWIN'), + CompileTask('debug-compile-rdtscp', ENABLE_RDTSCP='ON'), + CompileTask('debug-compile-sspi-winssl', tags=['debug-compile', 'sspi', 'winssl'], SASL='SSPI', 
SSL='WINDOWS'), + CompileTask('debug-compile-nosrv', tags=['debug-compile'], SRV='OFF'), + LinkTask('link-with-cmake', suffix_commands=[func('link sample program', BUILD_SAMPLE_WITH_CMAKE=1)]), LinkTask( - "link-with-cmake-ssl", - suffix_commands=[func("link sample program", BUILD_SAMPLE_WITH_CMAKE=1, ENABLE_SSL=1)], + 'link-with-cmake-ssl', + suffix_commands=[func('link sample program', BUILD_SAMPLE_WITH_CMAKE=1, ENABLE_SSL=1)], ), LinkTask( - "link-with-cmake-snappy", - suffix_commands=[func("link sample program", BUILD_SAMPLE_WITH_CMAKE=1, ENABLE_SNAPPY="ON")], + 'link-with-cmake-snappy', + suffix_commands=[func('link sample program', BUILD_SAMPLE_WITH_CMAKE=1, ENABLE_SNAPPY='ON')], ), - LinkTask("link-with-cmake-mac", suffix_commands=[func("link sample program", BUILD_SAMPLE_WITH_CMAKE=1)]), - LinkTask("link-with-cmake-windows", suffix_commands=[func("link sample program MSVC")]), + LinkTask('link-with-cmake-mac', suffix_commands=[func('link sample program', BUILD_SAMPLE_WITH_CMAKE=1)]), + LinkTask('link-with-cmake-windows', suffix_commands=[func('link sample program MSVC')]), LinkTask( - "link-with-cmake-windows-ssl", - suffix_commands=[func("link sample program MSVC", ENABLE_SSL=1)], - orchestration="ssl", + 'link-with-cmake-windows-ssl', + suffix_commands=[func('link sample program MSVC', ENABLE_SSL=1)], + orchestration='ssl', ), - LinkTask("link-with-cmake-windows-snappy", suffix_commands=[func("link sample program MSVC", ENABLE_SNAPPY="ON")]), - LinkTask("link-with-cmake-mingw", suffix_commands=[func("link sample program mingw")]), - LinkTask("link-with-pkg-config", suffix_commands=[func("link sample program")]), - LinkTask("link-with-pkg-config-mac", suffix_commands=[func("link sample program")]), - LinkTask("link-with-pkg-config-ssl", suffix_commands=[func("link sample program", ENABLE_SSL=1)]), - LinkTask("link-with-bson", suffix_commands=[func("link sample program bson")], orchestration=False), - LinkTask("link-with-bson-mac", 
suffix_commands=[func("link sample program bson")], orchestration=False), - LinkTask("link-with-bson-windows", suffix_commands=[func("link sample program MSVC bson")], orchestration=False), - LinkTask("link-with-bson-mingw", suffix_commands=[func("link sample program mingw bson")], orchestration=False), + LinkTask('link-with-cmake-windows-snappy', suffix_commands=[func('link sample program MSVC', ENABLE_SNAPPY='ON')]), + LinkTask('link-with-cmake-mingw', suffix_commands=[func('link sample program mingw')]), + LinkTask('link-with-pkg-config', suffix_commands=[func('link sample program')]), + LinkTask('link-with-pkg-config-mac', suffix_commands=[func('link sample program')]), + LinkTask('link-with-pkg-config-ssl', suffix_commands=[func('link sample program', ENABLE_SSL=1)]), + LinkTask('link-with-bson', suffix_commands=[func('link sample program bson')], orchestration=False), + LinkTask('link-with-bson-mac', suffix_commands=[func('link sample program bson')], orchestration=False), + LinkTask('link-with-bson-windows', suffix_commands=[func('link sample program MSVC bson')], orchestration=False), + LinkTask('link-with-bson-mingw', suffix_commands=[func('link sample program mingw bson')], orchestration=False), NamedTask( - "debian-package-build", + 'debian-package-build', commands=[ - shell_mongoc('export IS_PATCH="${is_patch}"\n' ".evergreen/scripts/debian_package_build.sh"), + shell_mongoc('export IS_PATCH="${is_patch}"\n.evergreen/scripts/debian_package_build.sh'), s3_put( - local_file="deb.tar.gz", - remote_file="${branch_name}/mongo-c-driver-debian-packages-${CURRENT_VERSION}.tar.gz", - content_type="${content_type|application/x-gzip}", + local_file='deb.tar.gz', + remote_file='${branch_name}/mongo-c-driver-debian-packages-${CURRENT_VERSION}.tar.gz', + content_type='${content_type|application/x-gzip}', ), s3_put( - local_file="deb.tar.gz", - remote_file="${branch_name}/${revision}/${version_id}/${build_id}/${execution}/mongo-c-driver-debian-packages.tar.gz", - 
content_type="${content_type|application/x-gzip}", + local_file='deb.tar.gz', + remote_file='${branch_name}/${revision}/${version_id}/${build_id}/${execution}/mongo-c-driver-debian-packages.tar.gz', + content_type='${content_type|application/x-gzip}', ), s3_put( - local_file="deb-i386.tar.gz", - remote_file="${branch_name}/mongo-c-driver-debian-packages-i386-${CURRENT_VERSION}.tar.gz", - content_type="${content_type|application/x-gzip}", + local_file='deb-i386.tar.gz', + remote_file='${branch_name}/mongo-c-driver-debian-packages-i386-${CURRENT_VERSION}.tar.gz', + content_type='${content_type|application/x-gzip}', ), s3_put( - local_file="deb-i386.tar.gz", - remote_file="${branch_name}/${revision}/${version_id}/${build_id}/${execution}/mongo-c-driver-debian-packages-i386.tar.gz", - content_type="${content_type|application/x-gzip}", + local_file='deb-i386.tar.gz', + remote_file='${branch_name}/${revision}/${version_id}/${build_id}/${execution}/mongo-c-driver-debian-packages-i386.tar.gz', + content_type='${content_type|application/x-gzip}', ), ], ), NamedTask( - "rpm-package-build", + 'rpm-package-build', commands=[ - shell_mongoc('export IS_PATCH="${is_patch}"\n' ".evergreen/scripts/check_rpm_spec.sh"), - shell_mongoc(".evergreen/scripts/build_snapshot_rpm.sh"), + shell_mongoc('export IS_PATCH="${is_patch}"\n.evergreen/scripts/check_rpm_spec.sh'), + shell_mongoc('.evergreen/scripts/build_snapshot_rpm.sh'), s3_put( - local_file="rpm.tar.gz", - remote_file="${branch_name}/mongo-c-driver-rpm-packages-${CURRENT_VERSION}.tar.gz", - content_type="${content_type|application/x-gzip}", + local_file='rpm.tar.gz', + remote_file='${branch_name}/mongo-c-driver-rpm-packages-${CURRENT_VERSION}.tar.gz', + content_type='${content_type|application/x-gzip}', ), s3_put( - local_file="rpm.tar.gz", - remote_file="${branch_name}/${revision}/${version_id}/${build_id}/${execution}/mongo-c-driver-rpm-packages.tar.gz", - content_type="${content_type|application/x-gzip}", + 
local_file='rpm.tar.gz', + remote_file='${branch_name}/${revision}/${version_id}/${build_id}/${execution}/mongo-c-driver-rpm-packages.tar.gz', + content_type='${content_type|application/x-gzip}', + ), + shell_mongoc( + 'sudo rm -rf ../build ../mock-result ../rpm.tar.gz\n' + 'export MOCK_TARGET_CONFIG=rocky+epel-9-aarch64\n' + '.evergreen/scripts/build_snapshot_rpm.sh' + ), + shell_mongoc( + 'sudo rm -rf ../build ../mock-result ../rpm.tar.gz\n' + 'export MOCK_TARGET_CONFIG=rocky+epel-8-aarch64\n' + '.evergreen/scripts/build_snapshot_rpm.sh' ), - shell_mongoc("sudo rm -rf ../build ../mock-result ../rpm.tar.gz\n" "export MOCK_TARGET_CONFIG=rocky+epel-9-aarch64\n" ".evergreen/scripts/build_snapshot_rpm.sh"), - shell_mongoc("sudo rm -rf ../build ../mock-result ../rpm.tar.gz\n" "export MOCK_TARGET_CONFIG=rocky+epel-8-aarch64\n" ".evergreen/scripts/build_snapshot_rpm.sh"), ], ), - CompileTask("debug-compile-with-warnings", CFLAGS="-Werror -Wno-cast-align"), + CompileTask('debug-compile-with-warnings', CFLAGS='-Werror -Wno-cast-align'), NamedTask( - "install-libmongoc-after-libbson", + 'install-libmongoc-after-libbson', commands=[ func(InstallUV.name), shell_mongoc( - ".evergreen/scripts/install-libmongoc-after-libbson.sh", + '.evergreen/scripts/install-libmongoc-after-libbson.sh', include_expansions_in_env=['distro_id', 'UV_INSTALL_DIR'], ), ], @@ -291,29 +297,29 @@ def __init__( class CoverageTask(MatrixTask): axes = OD( [ - ("version", ["latest"]), - ("topology", ["replica_set"]), - ("auth", [True]), - ("sasl", ["sasl"]), - ("ssl", ["openssl"]), - ("cse", [False, True]), + ('version', ['latest']), + ('topology', ['replica_set']), + ('auth', [True]), + ('sasl', ['sasl']), + ('ssl', ['openssl']), + ('cse', [False, True]), ] ) def additional_tags(self) -> Iterable[str]: yield from super().additional_tags() - yield "test-coverage" + yield 'test-coverage' yield str(self.settings.version) if self.cse: - yield "client-side-encryption" + yield 'client-side-encryption' def 
name_parts(self) -> Iterable[str]: - yield "test-coverage" - yield self.display("version") - yield self.display("topology").replace("_", "-") - yield from map(self.display, ("auth", "sasl", "ssl")) + yield 'test-coverage' + yield self.display('version') + yield self.display('topology').replace('_', '-') + yield from map(self.display, ('auth', 'sasl', 'ssl')) if self.settings.cse: - yield "cse" + yield 'cse' @property def cse(self) -> bool: @@ -323,38 +329,38 @@ def post_commands(self) -> Iterable[Value]: yield func(InstallUV.name) if self.cse: yield func( - "compile coverage", - SASL="AUTO", - SSL="OPENSSL", - COMPILE_LIBMONGOCRYPT="ON", + 'compile coverage', + SASL='AUTO', + SSL='OPENSSL', + COMPILE_LIBMONGOCRYPT='ON', EXTRA_CONFIGURE_FLAGS='EXTRA_CONFIGURE_FLAGS="-DENABLE_PIC=ON"', ) else: - yield func("compile coverage", SASL="AUTO", SSL="OPENSSL") + yield func('compile coverage', SASL='AUTO', SSL='OPENSSL') - yield func("fetch-det") + yield func('fetch-det') yield func( - "bootstrap-mongo-orchestration", + 'bootstrap-mongo-orchestration', MONGODB_VERSION=self.settings.version, TOPOLOGY=self.settings.topology, - AUTH=self.display("auth"), - SSL=self.display("ssl"), + AUTH=self.display('auth'), + SSL=self.display('ssl'), ) - yield func("run-simple-http-server") - extra = {"COVERAGE": "ON"} + yield func('run-simple-http-server') + extra = {'COVERAGE': 'ON'} if self.cse: - extra["CLIENT_SIDE_ENCRYPTION"] = "ON" - yield func("run-mock-kms-servers") - yield func("run-tests", AUTH=self.display("auth"), SSL=self.display("ssl"), **extra) - yield func("upload coverage") - yield func("update codecov.io") + extra['CLIENT_SIDE_ENCRYPTION'] = 'ON' + yield func('run-mock-kms-servers') + yield func('run-tests', AUTH=self.display('auth'), SSL=self.display('ssl'), **extra) + yield func('upload coverage') + yield func('update codecov.io') def do_is_valid_combination(self) -> bool: # Limit coverage tests to test-coverage-latest-replica-set-auth-sasl-openssl (+ cse). 
- require(self.setting_eq("topology", "replica_set")) - require(self.setting_eq("sasl", "sasl")) - require(self.setting_eq("ssl", "openssl")) - require(self.setting_eq("version", "latest")) + require(self.setting_eq('topology', 'replica_set')) + require(self.setting_eq('sasl', 'sasl')) + require(self.setting_eq('ssl', 'openssl')) + require(self.setting_eq('version', 'latest')) require(self.settings.auth is True) if not self.cse: @@ -362,9 +368,9 @@ def do_is_valid_combination(self) -> bool: return True # CSE has extra requirements - if self.settings.version != "latest": + if self.settings.version != 'latest': # We only work with 4.2 or newer for CSE - require(Version(str(self.settings.version)) >= Version("4.2")) + require(Version(str(self.settings.version)) >= Version('4.2')) return True @@ -374,64 +380,64 @@ def do_is_valid_combination(self) -> bool: class DNSTask(MatrixTask): axes = OD( [ - ("auth", [False, True]), - ("loadbalanced", [False, True]), - ("ssl", ["openssl", "winssl", "darwinssl"]), + ('auth', [False, True]), + ('loadbalanced', [False, True]), + ('ssl', ['openssl', 'winssl', 'darwinssl']), ] ) - name_prefix = "test-dns" + name_prefix = 'test-dns' def additional_dependencies(self) -> Iterable[DependencySpec]: yield self.build_task_name @property def build_task_name(self) -> str: - sasl = "sspi" if self.settings.ssl == "winssl" else "sasl" + sasl = 'sspi' if self.settings.ssl == 'winssl' else 'sasl' return f'debug-compile-{sasl}-{self.display("ssl")}' def name_parts(self) -> Iterable[str]: - yield "test-dns" + yield 'test-dns' if self.settings.auth: - yield "auth" + yield 'auth' if self.settings.loadbalanced: - yield "loadbalanced" - yield self.display("ssl") + yield 'loadbalanced' + yield self.display('ssl') def post_commands(self) -> Iterable[Value]: - yield func("fetch-build", BUILD_NAME=self.build_task_name) - yield func("fetch-det") + yield func('fetch-build', BUILD_NAME=self.build_task_name) + yield func('fetch-det') if 
self.settings.loadbalanced: orchestration = func( - "bootstrap-mongo-orchestration", - TOPOLOGY="sharded_cluster", - AUTH="auth" if self.settings.auth else "noauth", - SSL="ssl", - LOAD_BALANCER="on", + 'bootstrap-mongo-orchestration', + TOPOLOGY='sharded_cluster', + AUTH='auth' if self.settings.auth else 'noauth', + SSL='ssl', + LOAD_BALANCER='on', ) else: orchestration = func( - "bootstrap-mongo-orchestration", - TOPOLOGY="replica_set", - AUTH="auth" if self.settings.auth else "noauth", - SSL="ssl", + 'bootstrap-mongo-orchestration', + TOPOLOGY='replica_set', + AUTH='auth' if self.settings.auth else 'noauth', + SSL='ssl', ) yield orchestration - dns = "on" + dns = 'on' if self.settings.loadbalanced: - dns = "loadbalanced" - yield func("fetch-det") - yield func("start-load-balancer", MONGODB_URI="mongodb://localhost:27017,localhost:27018") + dns = 'loadbalanced' + yield func('fetch-det') + yield func('start-load-balancer', MONGODB_URI='mongodb://localhost:27017,localhost:27018') elif self.settings.auth: - dns = "dns-auth" - yield func("run-tests", SSL="ssl", AUTH=self.display("auth"), DNS=dns) + dns = 'dns-auth' + yield func('run-tests', SSL='ssl', AUTH=self.display('auth'), DNS=dns) def do_is_valid_combination(self) -> bool: prohibit(bool(self.settings.loadbalanced) and bool(self.settings.auth)) # Load balancer tests only run on some Linux hosts in Evergreen until CDRIVER-4041 is resolved. 
- prohibit(bool(self.settings.loadbalanced) and self.settings.ssl in ["darwinssl", "winssl"]) + prohibit(bool(self.settings.loadbalanced) and self.settings.ssl in ['darwinssl', 'winssl']) return True @@ -439,52 +445,52 @@ def do_is_valid_combination(self) -> bool: class CompressionTask(MatrixTask): - axes = OD([("compression", ["zlib", "snappy", "zstd"])]) - name_prefix = "test-latest-server" + axes = OD([('compression', ['zlib', 'snappy', 'zstd'])]) + name_prefix = 'test-latest-server' def additional_dependencies(self) -> Iterable[DependencySpec]: yield self.build_task_name @property def build_task_name(self) -> str: - return f"debug-compile-{self._compressor_suffix()}" + return f'debug-compile-{self._compressor_suffix()}' def additional_tags(self) -> Iterable[str]: yield from super().additional_tags() - yield "compression" - yield "latest" + yield 'compression' + yield 'latest' yield from self._compressor_list() def name_parts(self) -> Iterable[str]: return [self.name_prefix, self._compressor_suffix()] def post_commands(self) -> Iterable[Value]: - yield func("fetch-build", BUILD_NAME=self.build_task_name) - yield func("fetch-det") - yield func("bootstrap-mongo-orchestration", AUTH="noauth", SSL="nossl") + yield func('fetch-build', BUILD_NAME=self.build_task_name) + yield func('fetch-det') + yield func('bootstrap-mongo-orchestration', AUTH='noauth', SSL='nossl') yield func(InstallUV.name) - yield func("run-simple-http-server") - yield func("run-tests", AUTH="noauth", SSL="nossl", COMPRESSORS=",".join(self._compressor_list())) + yield func('run-simple-http-server') + yield func('run-tests', AUTH='noauth', SSL='nossl', COMPRESSORS=','.join(self._compressor_list())) def _compressor_suffix(self): - if self.settings.compression == "zlib": - return "compression-zlib" - elif self.settings.compression == "snappy": - return "compression-snappy" - elif self.settings.compression == "zstd": - return "compression-zstd" + if self.settings.compression == 'zlib': + return 
'compression-zlib' + elif self.settings.compression == 'snappy': + return 'compression-snappy' + elif self.settings.compression == 'zstd': + return 'compression-zstd' else: - return "compression" + return 'compression' def _compressor_list(self): - if self.settings.compression == "zlib": - return ["zlib"] - elif self.settings.compression == "snappy": - return ["snappy"] - elif self.settings.compression == "zstd": - return ["zstd"] + if self.settings.compression == 'zlib': + return ['zlib'] + elif self.settings.compression == 'snappy': + return ['snappy'] + elif self.settings.compression == 'zstd': + return ['zstd'] else: - return ["snappy", "zlib", "zstd"] + return ['snappy', 'zlib', 'zstd'] all_tasks = chain(all_tasks, CompressionTask.matrix()) @@ -494,11 +500,11 @@ class SpecialIntegrationTask(NamedTask): def __init__( self, task_name: str, - main_dep: str = "debug-compile-sasl-openssl", + main_dep: str = 'debug-compile-sasl-openssl', uri: str | None = None, tags: Iterable[str] = (), - version: str = "latest", - topology: str = "server", + version: str = 'latest', + topology: str = 'server', ): self._main_dep = main_dep super().__init__(task_name, depends_on=[self._main_dep], tags=tags) @@ -507,41 +513,41 @@ def __init__( self._topo = topology def pre_commands(self) -> Iterable[Value]: - yield func("fetch-build", BUILD_NAME=self._main_dep) - yield func("fetch-det") - yield func("bootstrap-mongo-orchestration", MONGODB_VERSION=self._version, TOPOLOGY=self._topo) + yield func('fetch-build', BUILD_NAME=self._main_dep) + yield func('fetch-det') + yield func('bootstrap-mongo-orchestration', MONGODB_VERSION=self._version, TOPOLOGY=self._topo) yield func(InstallUV.name) - yield func("run-simple-http-server") - yield func("run-tests", URI=self._uri) + yield func('run-simple-http-server') + yield func('run-tests', URI=self._uri) all_tasks = chain( all_tasks, [ # Verify that retryWrites=true is ignored with standalone. 
- SpecialIntegrationTask("retry-true-latest-server", uri="mongodb://localhost/?retryWrites=true"), - SpecialIntegrationTask("test-latest-server-hardened", "hardened-compile", tags=["hardened", "latest"]), + SpecialIntegrationTask('retry-true-latest-server', uri='mongodb://localhost/?retryWrites=true'), + SpecialIntegrationTask('test-latest-server-hardened', 'hardened-compile', tags=['hardened', 'latest']), ], ) class AuthTask(MatrixTask): - axes = OD([("sasl", ["sasl", "sspi", False]), ("ssl", ["openssl", "darwinssl", "winssl"])]) + axes = OD([('sasl', ['sasl', 'sspi', False]), ('ssl', ['openssl', 'darwinssl', 'winssl'])]) - name_prefix = "authentication-tests" + name_prefix = 'authentication-tests' def additional_tags(self) -> Iterable[str]: yield from super().additional_tags() - yield "authentication-tests" - yield self.display("ssl") - yield self.display("sasl") + yield 'authentication-tests' + yield self.display('ssl') + yield self.display('sasl') def additional_dependencies(self) -> Iterable[DependencySpec]: yield self.build_task_name def post_commands(self) -> Iterable[Value]: - yield func("fetch-build", BUILD_NAME=self.build_task_name) - yield func("run auth tests") + yield func('fetch-build', BUILD_NAME=self.build_task_name) + yield func('run auth tests') @property def build_task_name(self) -> str: @@ -549,14 +555,14 @@ def build_task_name(self) -> str: def name_parts(self) -> Iterable[str]: yield self.name_prefix - yield self.display("ssl") + yield self.display('ssl') if not self.settings.sasl: - yield "nosasl" + yield 'nosasl' def do_is_valid_combination(self) -> bool: - both_or_neither(self.settings.ssl == "winssl", self.settings.sasl == "sspi") + both_or_neither(self.settings.ssl == 'winssl', self.settings.sasl == 'sspi') if not self.settings.sasl: - require(self.settings.ssl == "openssl") + require(self.settings.ssl == 'openssl') return True @@ -569,15 +575,15 @@ def __init__(self, name: str, tags: Iterable[str], get_build: str, commands: Ite self._dep 
= get_build def pre_commands(self) -> Iterable[Value]: - yield func("fetch-build", BUILD_NAME=self._dep) + yield func('fetch-build', BUILD_NAME=self._dep) all_tasks = chain( all_tasks, [ NamedTask( - "authentication-tests-asan-memcheck", - tags=["authentication-tests", "asan"], + 'authentication-tests-asan-memcheck', + tags=['authentication-tests', 'asan'], commands=[ func(InstallUV.name), shell_mongoc( @@ -586,89 +592,89 @@ def pre_commands(self) -> Iterable[Value]: """, add_expansions_to_env=True, ), - func("run auth tests", ASAN="on"), + func('run auth tests', ASAN='on'), ], - ) + ), ], ) # Add API version tasks. -for server_version in [ "8.0", "7.0", "6.0", "5.0"]: +for server_version in ['8.0', '7.0', '6.0', '5.0']: all_tasks = chain( all_tasks, [ PostCompileTask( - "test-versioned-api-" + server_version, - tags=["versioned-api", f"{server_version}"], - get_build="debug-compile-nosasl-openssl", + 'test-versioned-api-' + server_version, + tags=['versioned-api', f'{server_version}'], + get_build='debug-compile-nosasl-openssl', commands=[ - func("fetch-det"), + func('fetch-det'), func( - "bootstrap-mongo-orchestration", - TOPOLOGY="server", - AUTH="auth", - SSL="ssl", + 'bootstrap-mongo-orchestration', + TOPOLOGY='server', + AUTH='auth', + SSL='ssl', MONGODB_VERSION=server_version, - REQUIRE_API_VERSION="true", + REQUIRE_API_VERSION='true', ), func(InstallUV.name), - func("run-simple-http-server"), - func("run-tests", MONGODB_API_VERSION=1, AUTH="auth", SSL="ssl"), + func('run-simple-http-server'), + func('run-tests', MONGODB_API_VERSION=1, AUTH='auth', SSL='ssl'), ], ), PostCompileTask( - "test-versioned-api-accept-version-two-" + server_version, - tags=["versioned-api", f"{server_version}"], - get_build="debug-compile-nosasl-nossl", + 'test-versioned-api-accept-version-two-' + server_version, + tags=['versioned-api', f'{server_version}'], + get_build='debug-compile-nosasl-nossl', commands=[ - func("fetch-det"), + func('fetch-det'), func( - 
"bootstrap-mongo-orchestration", - TOPOLOGY="server", - AUTH="noauth", - SSL="nossl", + 'bootstrap-mongo-orchestration', + TOPOLOGY='server', + AUTH='noauth', + SSL='nossl', MONGODB_VERSION=server_version, - ORCHESTRATION_FILE="versioned-api-testing.json", + ORCHESTRATION_FILE='versioned-api-testing.json', ), func(InstallUV.name), - func("run-simple-http-server"), - func("run-tests", MONGODB_API_VERSION=1, AUTH="noauth", SSL="nossl"), + func('run-simple-http-server'), + func('run-tests', MONGODB_API_VERSION=1, AUTH='noauth', SSL='nossl'), ], - ) - ] + ), + ], ) class IPTask(MatrixTask): axes = OD( [ - ("client", ["ipv6", "ipv4", "localhost"]), - ("server", ["ipv6", "ipv4"]), + ('client', ['ipv6', 'ipv4', 'localhost']), + ('server', ['ipv6', 'ipv4']), ] ) - name_prefix = "test-latest" + name_prefix = 'test-latest' def additional_dependencies(self) -> Iterable[DependencySpec]: - yield "debug-compile-nosasl-nossl" + yield 'debug-compile-nosasl-nossl' def additional_tags(self) -> Iterable[str]: yield from super().additional_tags() - yield from ("nossl", "nosasl", "server", "ipv4-ipv6", "latest") + yield from ('nossl', 'nosasl', 'server', 'ipv4-ipv6', 'latest') def post_commands(self) -> Iterable[Value]: return [ - func("fetch-build", BUILD_NAME="debug-compile-nosasl-nossl"), - func("fetch-det"), - func("bootstrap-mongo-orchestration"), + func('fetch-build', BUILD_NAME='debug-compile-nosasl-nossl'), + func('fetch-det'), + func('bootstrap-mongo-orchestration'), func(InstallUV.name), - func("run-simple-http-server"), + func('run-simple-http-server'), func( - "run-tests", + 'run-tests', URI={ - "ipv6": "mongodb://[::1]/", - "ipv4": "mongodb://127.0.0.1/", - "localhost": "mongodb://localhost/", + 'ipv6': 'mongodb://[::1]/', + 'ipv4': 'mongodb://127.0.0.1/', + 'localhost': 'mongodb://localhost/', }[str(self.settings.client)], ), ] @@ -678,26 +684,26 @@ def name_parts(self) -> Iterable[str]: self.name_prefix, f'server-{self.display("server")}', 
f'client-{self.display("client")}', - "noauth", - "nosasl", - "nossl", + 'noauth', + 'nosasl', + 'nossl', ) def do_is_valid_combination(self) -> bool: # This would fail by design. - if self.settings.server == "ipv4": - prohibit(self.settings.client == "ipv6") + if self.settings.server == 'ipv4': + prohibit(self.settings.client == 'ipv6') # Default configuration is tested in other variants. - if self.settings.server == "ipv6": - prohibit(self.settings.client == "localhost") + if self.settings.server == 'ipv6': + prohibit(self.settings.client == 'localhost') return True all_tasks = chain(all_tasks, IPTask.matrix()) aws_compile_task = NamedTask( - "debug-compile-aws", + 'debug-compile-aws', commands=[ func(InstallUV.name), shell_mongoc( @@ -718,7 +724,7 @@ def do_is_valid_combination(self) -> bool: include_expansions_in_env=['CC'], redirect_standard_error_to_output=True, ), - func("upload-build"), + func('upload-build'), ], ) @@ -728,38 +734,38 @@ def do_is_valid_combination(self) -> bool: class AWSTestTask(MatrixTask): axes = OD( [ - ("testcase", ["regular", "ec2", "ecs", "lambda", "assume_role", "assume_role_with_web_identity"]), - ("version", ["latest", "8.0", "7.0", "6.0", "5.0", "4.4"]), + ('testcase', ['regular', 'ec2', 'ecs', 'lambda', 'assume_role', 'assume_role_with_web_identity']), + ('version', ['latest', '8.0', '7.0', '6.0', '5.0', '4.4']), ] ) - name_prefix = "test-aws-openssl" + name_prefix = 'test-aws-openssl' def additional_dependencies(self) -> Iterable[DependencySpec]: - yield "debug-compile-aws" + yield 'debug-compile-aws' def additional_tags(self) -> Iterable[str]: yield from super().additional_tags() yield f'{self.settings.version}' - yield f'test-aws' + yield 'test-aws' def post_commands(self) -> Iterable[Value]: return [ - func("fetch-build", BUILD_NAME="debug-compile-aws"), - func("fetch-det"), + func('fetch-build', BUILD_NAME='debug-compile-aws'), + func('fetch-det'), func( - "bootstrap-mongo-orchestration", - AUTH="auth", - 
ORCHESTRATION_FILE="auth-aws.json", + 'bootstrap-mongo-orchestration', + AUTH='auth', + ORCHESTRATION_FILE='auth-aws.json', MONGODB_VERSION=self.settings.version, - TOPOLOGY="server", + TOPOLOGY='server', ), - func("run aws tests", TESTCASE=str(self.settings.testcase).upper()), + func('run aws tests', TESTCASE=str(self.settings.testcase).upper()), ] @property def name(self): - return f"{self.name_prefix}-{self.settings.testcase}-{self.settings.version}" + return f'{self.name_prefix}-{self.settings.testcase}-{self.settings.version}' all_tasks = chain(all_tasks, AWSTestTask.matrix()) @@ -769,26 +775,26 @@ class OCSPTask(MatrixTask): axes = OD( [ ( - "test", + 'test', [ - "test_1", - "test_2", - "test_3", - "test_4", - "soft_fail_test", - "malicious_server_test_1", - "malicious_server_test_2", - "cache", + 'test_1', + 'test_2', + 'test_3', + 'test_4', + 'soft_fail_test', + 'malicious_server_test_1', + 'malicious_server_test_2', + 'cache', ], ), - ("delegate", ["delegate", "nodelegate"]), - ("cert", ["rsa", "ecdsa"]), - ("ssl", ["openssl", "darwinssl", "winssl"]), - ("version", ["latest", "8.0", "7.0", "6.0", "5.0", "4.4"]), + ('delegate', ['delegate', 'nodelegate']), + ('cert', ['rsa', 'ecdsa']), + ('ssl', ['openssl', 'darwinssl', 'winssl']), + ('version', ['latest', '8.0', '7.0', '6.0', '5.0', '4.4']), ] ) - name_prefix = "test-ocsp" + name_prefix = 'test-ocsp' @property def build_task_name(self) -> str: @@ -803,34 +809,34 @@ def additional_dependencies(self) -> Iterable[DependencySpec]: @property def name(self): - return f"ocsp-{self.settings.ssl}-{self.test}-{self.settings.cert}-{self.settings.delegate}-{self.settings.version}" + return f'ocsp-{self.settings.ssl}-{self.test}-{self.settings.cert}-{self.settings.delegate}-{self.settings.version}' @property def test(self) -> str: return str(self.settings.test) def post_commands(self) -> Iterable[Value]: - yield func("fetch-build", BUILD_NAME=self.build_task_name) - yield func("fetch-det") + yield func('fetch-build', 
BUILD_NAME=self.build_task_name) + yield func('fetch-det') - stapling = "mustStaple" - if self.test in ["test_3", "test_4", "soft_fail_test", "cache"]: - stapling = "disableStapling" - if self.test in ["malicious_server_test_1", "malicious_server_test_2"]: - stapling = "mustStaple-disableStapling" + stapling = 'mustStaple' + if self.test in ['test_3', 'test_4', 'soft_fail_test', 'cache']: + stapling = 'disableStapling' + if self.test in ['malicious_server_test_1', 'malicious_server_test_2']: + stapling = 'mustStaple-disableStapling' orchestration = func( - "bootstrap-mongo-orchestration", + 'bootstrap-mongo-orchestration', MONGODB_VERSION=self.settings.version, - TOPOLOGY="server", - SSL="ssl", - OCSP="on", - ORCHESTRATION_FILE=f"{self.settings.cert}-basic-tls-ocsp-{stapling}.json", + TOPOLOGY='server', + SSL='ssl', + OCSP='on', + ORCHESTRATION_FILE=f'{self.settings.cert}-basic-tls-ocsp-{stapling}.json', ) # The cache test expects a revoked response from an OCSP responder, exactly like TEST_4. - test_column = "TEST_4" if self.test == "cache" else str(self.test).upper() - use_delegate = "ON" if self.settings.delegate == "delegate" else "OFF" + test_column = 'TEST_4' if self.test == 'cache' else str(self.test).upper() + use_delegate = 'ON' if self.settings.delegate == 'delegate' else 'OFF' yield ( shell_mongoc( @@ -842,7 +848,7 @@ def post_commands(self) -> Iterable[Value]: yield (orchestration) - if self.test == "cache": + if self.test == 'cache': yield ( shell_mongoc( f""" @@ -866,35 +872,35 @@ def to_dict(self): # OCSP tests should run with a batchtime of 14 days. Avoid running OCSP # tests in patch builds by default (only in commit builds). - task["patchable"] = False + task['patchable'] = False return task # Testing in OCSP has a lot of exceptions. def do_is_valid_combination(self) -> bool: - if self.settings.ssl == "darwinssl": + if self.settings.ssl == 'darwinssl': # Secure Transport quietly ignores a must-staple certificate with no stapled response. 
- prohibit(self.test == "malicious_server_test_2") + prohibit(self.test == 'malicious_server_test_2') # Why does this fail with Secure Transport (CSSMERR_TP_CERT_SUSPENDED)...? - prohibit(self.test == "test_3") + prohibit(self.test == 'test_3') # CDRIVER-3759: Secure Transport does not implement soft failure? - prohibit(self.test == "soft_fail_test") + prohibit(self.test == 'soft_fail_test') # Only Server 6.0+ are available on MacOS ARM64. - if self.settings.version != "latest": - prohibit(Version(self.settings.version) < Version("6.0")) + if self.settings.version != 'latest': + prohibit(Version(self.settings.version) < Version('6.0')) - if self.settings.ssl == "darwinssl" or self.settings.ssl == "winssl": + if self.settings.ssl == 'darwinssl' or self.settings.ssl == 'winssl': # ECDSA certs can't be loaded (in the PEM format they're stored) on Windows/macOS. Skip them. - prohibit(self.settings.cert == "ecdsa") + prohibit(self.settings.cert == 'ecdsa') # OCSP stapling is not supported on macOS or Windows. 
- prohibit(self.test in ["test_1", "test_2", "cache"]) + prohibit(self.test in ['test_1', 'test_2', 'cache']) - if self.test == "soft_fail_test" or self.test == "malicious_server_test_2" or self.test == "cache": - prohibit(self.settings.delegate == "delegate") + if self.test == 'soft_fail_test' or self.test == 'malicious_server_test_2' or self.test == 'cache': + prohibit(self.settings.delegate == 'delegate') return True diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/testazurekms.py b/.evergreen/legacy_config_generator/evergreen_config_lib/testazurekms.py index c3e9621c731..4d34fb16699 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/testazurekms.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/testazurekms.py @@ -18,19 +18,19 @@ from collections import OrderedDict as OD from typing import MutableSequence -from config_generator.components.funcs.install_uv import InstallUV - -from evergreen_config_generator.functions import shell_exec, func +from evergreen_config_generator.functions import func, shell_exec +from evergreen_config_generator.taskgroups import TaskGroup from evergreen_config_generator.tasks import NamedTask from evergreen_config_generator.variants import Variant -from evergreen_config_generator.taskgroups import TaskGroup + +from config_generator.components.funcs.install_uv import InstallUV def _create_tasks(): # passtask is expected to run on a remote Azure VM and succeed at obtaining credentials. 
- passtask = NamedTask(task_name="testazurekms-task") + passtask = NamedTask(task_name='testazurekms-task') passtask.commands = [ - func("fetch-source"), + func('fetch-source'), func(InstallUV.name), shell_exec( r""" @@ -73,9 +73,9 @@ def _create_tasks(): ), ] - failtask = NamedTask(task_name="testazurekms-fail-task") + failtask = NamedTask(task_name='testazurekms-fail-task') failtask.commands = [ - func("fetch-source"), + func('fetch-source'), func(InstallUV.name), shell_exec( r""" @@ -102,28 +102,23 @@ def _create_tasks(): def _create_variant(): return Variant( - name="testazurekms-variant", - display_name="Azure KMS", + name='testazurekms-variant', + display_name='Azure KMS', # Azure Virtual Machine created is Debian 10. - run_on="debian11-small", - tasks=["testazurekms_task_group", "testazurekms-fail-task"], + run_on='debian11-small', + tasks=['testazurekms_task_group', 'testazurekms-fail-task'], batchtime=20160, ) # Use a batchtime of 14 days as suggested by the CSFLE test README def _create_task_group(): - task_group = TaskGroup(name="testazurekms_task_group") + task_group = TaskGroup(name='testazurekms_task_group') task_group.setup_group_can_fail_task = True task_group.setup_group_timeout_secs = 1800 # 30 minutes task_group.setup_group = [ - func("fetch-det"), + func('fetch-det'), # Assume role to get AWS secrets. - { - "command": "ec2.assume_role", - "params": { - "role_arn": "${aws_test_secrets_role}" - } - }, + {'command': 'ec2.assume_role', 'params': {'role_arn': '${aws_test_secrets_role}'}}, shell_exec( r""" DRIVERS_TOOLS=$(pwd)/drivers-evergreen-tools @@ -143,17 +138,17 @@ def _create_task_group(): $DRIVERS_TOOLS/.evergreen/csfle/azurekms/create-and-setup-vm.sh """, test=False, - include_expansions_in_env=[ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ] + include_expansions_in_env=['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SESSION_TOKEN'], ), # Load the AZUREKMS_VMNAME expansion. 
OD( [ - ("command", "expansions.update"), + ('command', 'expansions.update'), ( - "params", + 'params', OD( [ - ("file", "testazurekms-expansions.yml"), + ('file', 'testazurekms-expansions.yml'), ] ), ), @@ -165,12 +160,12 @@ def _create_task_group(): # Load expansions again. The setup task may have failed before running `expansions.update`. OD( [ - ("command", "expansions.update"), + ('command', 'expansions.update'), ( - "params", + 'params', OD( [ - ("file", "testazurekms-expansions.yml"), + ('file', 'testazurekms-expansions.yml'), ] ), ), @@ -186,7 +181,7 @@ def _create_task_group(): test=False, ), ] - task_group.tasks = ["testazurekms-task"] + task_group.tasks = ['testazurekms-task'] return task_group diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py b/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py index d2eae19ed02..1790636fac9 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/testgcpkms.py @@ -17,19 +17,19 @@ from collections import OrderedDict as OD from typing import MutableSequence -from config_generator.components.funcs.install_uv import InstallUV - -from evergreen_config_generator.functions import shell_exec, func +from evergreen_config_generator.functions import func, shell_exec +from evergreen_config_generator.taskgroups import TaskGroup from evergreen_config_generator.tasks import NamedTask from evergreen_config_generator.variants import Variant -from evergreen_config_generator.taskgroups import TaskGroup + +from config_generator.components.funcs.install_uv import InstallUV def _create_tasks(): passtask = NamedTask( - task_name="testgcpkms-task", + task_name='testgcpkms-task', commands=[ - func("fetch-source"), + func('fetch-source'), func(InstallUV.name), shell_exec( r""" @@ -70,7 +70,7 @@ def _create_tasks(): ) failtask = NamedTask( - task_name="testgcpkms-fail-task", + 
task_name='testgcpkms-fail-task', commands=[ func(InstallUV.name), shell_exec( @@ -97,21 +97,21 @@ def _create_tasks(): def _create_variant(): return Variant( - name="testgcpkms-variant", - display_name="GCP KMS", + name='testgcpkms-variant', + display_name='GCP KMS', # GCP Virtual Machine created is Debian 11. - run_on="debian11-small", - tasks=["testgcpkms_task_group", "testgcpkms-fail-task"], + run_on='debian11-small', + tasks=['testgcpkms_task_group', 'testgcpkms-fail-task'], batchtime=20160, ) # Use a batchtime of 14 days as suggested by the CSFLE test README def _create_task_group(): - task_group = TaskGroup(name="testgcpkms_task_group") + task_group = TaskGroup(name='testgcpkms_task_group') task_group.setup_group_can_fail_task = True task_group.setup_group_timeout_secs = 1800 # 30 minutes task_group.setup_group = [ - func("fetch-det"), + func('fetch-det'), # Create and set up a GCE instance using driver tools script shell_exec( r""" @@ -124,7 +124,7 @@ def _create_task_group(): test=False, ), # Load the GCPKMS_GCLOUD, GCPKMS_INSTANCE, GCPKMS_PROJECT, and GCPKMS_ZONE expansions. 
- OD([("command", "expansions.update"), ("params", OD([("file", "testgcpkms-expansions.yml")]))]), + OD([('command', 'expansions.update'), ('params', OD([('file', 'testgcpkms-expansions.yml')]))]), ] task_group.teardown_group = [ @@ -139,7 +139,7 @@ def _create_task_group(): test=False, ) ] - task_group.tasks = ["testgcpkms-task"] + task_group.tasks = ['testgcpkms-task'] return task_group diff --git a/.evergreen/legacy_config_generator/evergreen_config_lib/variants.py b/.evergreen/legacy_config_generator/evergreen_config_lib/variants.py index 06a37a08247..1b1b7a67692 100644 --- a/.evergreen/legacy_config_generator/evergreen_config_lib/variants.py +++ b/.evergreen/legacy_config_generator/evergreen_config_lib/variants.py @@ -16,12 +16,11 @@ from evergreen_config_generator.variants import Variant - mobile_flags = ( - " -DCMAKE_FIND_ROOT_PATH_MODE_LIBRARY=ONLY" - " -DCMAKE_FIND_ROOT_PATH_MODE_PACKAGE=ONLY" - " -DCMAKE_FIND_ROOT_PATH_MODE_PROGRAM=NEVER" - " -DCMAKE_FIND_ROOT_PATH_MODE_INCLUDE=ONLY" + ' -DCMAKE_FIND_ROOT_PATH_MODE_LIBRARY=ONLY' + ' -DCMAKE_FIND_ROOT_PATH_MODE_PACKAGE=ONLY' + ' -DCMAKE_FIND_ROOT_PATH_MODE_PROGRAM=NEVER' + ' -DCMAKE_FIND_ROOT_PATH_MODE_INCLUDE=ONLY' ) @@ -32,322 +31,322 @@ def days(n: int) -> int: all_variants = [ Variant( - "abi-compliance-check", - "ABI Compliance Check", - ["ubuntu2004-small", "ubuntu2004-medium", "ubuntu2004-large"], - ["abi-compliance-check"], + 'abi-compliance-check', + 'ABI Compliance Check', + ['ubuntu2004-small', 'ubuntu2004-medium', 'ubuntu2004-large'], + ['abi-compliance-check'], ), Variant( - "smoke", - "Smoke Tests", - "ubuntu2204-small", + 'smoke', + 'Smoke Tests', + 'ubuntu2204-small', [ - "make-docs", - "kms-divergence-check", - "release-compile", - "debug-compile-no-counters", - "compile-tracing", - "link-with-cmake", - "link-with-cmake-ssl", - "link-with-cmake-snappy", - "verify-headers", - OD([("name", "link-with-cmake-mac"), ("distros", ["macos-14-arm64"])]), - OD([("name", "link-with-cmake-windows"), 
("distros", ["windows-vsCurrent-large"])]), - OD([("name", "link-with-cmake-windows-ssl"), ("distros", ["windows-vsCurrent-large"])]), - OD([("name", "link-with-cmake-windows-snappy"), ("distros", ["windows-vsCurrent-large"])]), - OD([("name", "link-with-cmake-mingw"), ("distros", ["windows-vsCurrent-large"])]), - OD([("name", "link-with-pkg-config"), ("distros", ["ubuntu2004-test"])]), - OD([("name", "link-with-pkg-config-mac"), ("distros", ["macos-14-arm64"])]), - "link-with-pkg-config-ssl", - "link-with-bson", - OD([("name", "link-with-bson-windows"), ("distros", ["windows-vsCurrent-large"])]), - OD([("name", "link-with-bson-mac"), ("distros", ["macos-14-arm64"])]), - OD([("name", "link-with-bson-mingw"), ("distros", ["windows-vsCurrent-large"])]), - "check-headers", - "debug-compile-with-warnings", - "install-libmongoc-after-libbson", + 'make-docs', + 'kms-divergence-check', + 'release-compile', + 'debug-compile-no-counters', + 'compile-tracing', + 'link-with-cmake', + 'link-with-cmake-ssl', + 'link-with-cmake-snappy', + 'verify-headers', + OD([('name', 'link-with-cmake-mac'), ('distros', ['macos-14-arm64'])]), + OD([('name', 'link-with-cmake-windows'), ('distros', ['windows-vsCurrent-large'])]), + OD([('name', 'link-with-cmake-windows-ssl'), ('distros', ['windows-vsCurrent-large'])]), + OD([('name', 'link-with-cmake-windows-snappy'), ('distros', ['windows-vsCurrent-large'])]), + OD([('name', 'link-with-cmake-mingw'), ('distros', ['windows-vsCurrent-large'])]), + OD([('name', 'link-with-pkg-config'), ('distros', ['ubuntu2004-test'])]), + OD([('name', 'link-with-pkg-config-mac'), ('distros', ['macos-14-arm64'])]), + 'link-with-pkg-config-ssl', + 'link-with-bson', + OD([('name', 'link-with-bson-windows'), ('distros', ['windows-vsCurrent-large'])]), + OD([('name', 'link-with-bson-mac'), ('distros', ['macos-14-arm64'])]), + OD([('name', 'link-with-bson-mingw'), ('distros', ['windows-vsCurrent-large'])]), + 'check-headers', + 'debug-compile-with-warnings', + 
'install-libmongoc-after-libbson', ], { # Disable the MongoDB legacy shell download, which is not available in 5.0 for u22 - "SKIP_LEGACY_SHELL": "1" + 'SKIP_LEGACY_SHELL': '1' }, - tags=["pr-merge-gate"], + tags=['pr-merge-gate'], ), Variant( - "clang37", - "clang 3.7 (Archlinux)", - "archlinux-test", + 'clang37', + 'clang 3.7 (Archlinux)', + 'archlinux-test', [ - "release-compile", - "debug-compile-sasl-openssl", - "debug-compile-nosasl-openssl", - ".authentication-tests .openssl", + 'release-compile', + 'debug-compile-sasl-openssl', + 'debug-compile-nosasl-openssl', + '.authentication-tests .openssl', ], - {"CC": "clang"}, + {'CC': 'clang'}, ), Variant( - "clang100-i686", - "clang 10.0 (i686) (Ubuntu 20.04)", - "ubuntu2004-test", + 'clang100-i686', + 'clang 10.0 (i686) (Ubuntu 20.04)', + 'ubuntu2004-test', [ - "release-compile", - "debug-compile-nosasl-nossl", - ".debug-compile !.sspi .nossl .nosasl", - ".latest .nossl .nosasl", + 'release-compile', + 'debug-compile-nosasl-nossl', + '.debug-compile !.sspi .nossl .nosasl', + '.latest .nossl .nosasl', ], - {"CC": "clang", "MARCH": "i686"}, + {'CC': 'clang', 'MARCH': 'i686'}, ), Variant( - "gcc82rhel", - "GCC 8.2 (RHEL 8.0)", - "rhel80-test", + 'gcc82rhel', + 'GCC 8.2 (RHEL 8.0)', + 'rhel80-test', [ - ".hardened", - ".compression !.snappy !.zstd", - "release-compile", - "debug-compile-nosasl-nossl", - "debug-compile-nosasl-openssl", - "debug-compile-sasl-openssl", - ".authentication-tests .openssl", - ".latest .nossl", + '.hardened', + '.compression !.snappy !.zstd', + 'release-compile', + 'debug-compile-nosasl-nossl', + 'debug-compile-nosasl-openssl', + 'debug-compile-sasl-openssl', + '.authentication-tests .openssl', + '.latest .nossl', ], - {"CC": "gcc"}, + {'CC': 'gcc'}, ), Variant( - "gcc102", - "GCC 10.2 (Debian 11.0)", - "debian11-large", - ["release-compile", "debug-compile-nosasl-nossl", ".latest .nossl"], - {"CC": "gcc"}, + 'gcc102', + 'GCC 10.2 (Debian 11.0)', + 'debian11-large', + ['release-compile', 
'debug-compile-nosasl-nossl', '.latest .nossl'], + {'CC': 'gcc'}, ), Variant( - "gcc94-i686", - "GCC 9.4 (i686) (Ubuntu 20.04)", - "ubuntu2004-test", - ["release-compile", "debug-compile-nosasl-nossl", ".latest .nossl .nosasl"], - {"CC": "gcc", "MARCH": "i686"}, + 'gcc94-i686', + 'GCC 9.4 (i686) (Ubuntu 20.04)', + 'ubuntu2004-test', + ['release-compile', 'debug-compile-nosasl-nossl', '.latest .nossl .nosasl'], + {'CC': 'gcc', 'MARCH': 'i686'}, ), Variant( - "gcc94", - "GCC 9.4 (Ubuntu 20.04)", - "ubuntu2004-test", + 'gcc94', + 'GCC 9.4 (Ubuntu 20.04)', + 'ubuntu2004-test', [ - ".compression !.zstd", - "debug-compile-nosrv", - "release-compile", - "debug-compile-nosasl-nossl", - "debug-compile-sasl-openssl", - "debug-compile-nosasl-openssl", - ".authentication-tests .openssl", - ".authentication-tests .asan", - ".test-coverage", - ".latest .nossl", - "retry-true-latest-server", - "test-dns-openssl", - "test-dns-auth-openssl", - "test-dns-loadbalanced-openssl", + '.compression !.zstd', + 'debug-compile-nosrv', + 'release-compile', + 'debug-compile-nosasl-nossl', + 'debug-compile-sasl-openssl', + 'debug-compile-nosasl-openssl', + '.authentication-tests .openssl', + '.authentication-tests .asan', + '.test-coverage', + '.latest .nossl', + 'retry-true-latest-server', + 'test-dns-openssl', + 'test-dns-auth-openssl', + 'test-dns-loadbalanced-openssl', ], - {"CC": "gcc"}, + {'CC': 'gcc'}, ), Variant( - "darwin", - "*Darwin, macOS (Apple LLVM)", - "macos-14-arm64", + 'darwin', + '*Darwin, macOS (Apple LLVM)', + 'macos-14-arm64', [ - ".compression !.snappy", - "release-compile", - "debug-compile-nosasl-nossl", - "debug-compile-nosrv", - "debug-compile-sasl-darwinssl", - "debug-compile-nosasl-nossl", - ".authentication-tests .darwinssl", - ".latest .nossl", - "test-dns-darwinssl", - "test-dns-auth-darwinssl", - "debug-compile-lto", - "debug-compile-lto-thin", - "debug-compile-aws", - "test-aws-openssl-regular-latest", + '.compression !.snappy', + 'release-compile', + 
'debug-compile-nosasl-nossl', + 'debug-compile-nosrv', + 'debug-compile-sasl-darwinssl', + 'debug-compile-nosasl-nossl', + '.authentication-tests .darwinssl', + '.latest .nossl', + 'test-dns-darwinssl', + 'test-dns-auth-darwinssl', + 'debug-compile-lto', + 'debug-compile-lto-thin', + 'debug-compile-aws', + 'test-aws-openssl-regular-latest', ], - {"CC": "clang"}, + {'CC': 'clang'}, ), Variant( - "darwin-intel", - "*Darwin, Intel macOS (Apple LLVM)", - "macos-14", + 'darwin-intel', + '*Darwin, Intel macOS (Apple LLVM)', + 'macos-14', [ - "debug-compile-aws", - "debug-compile-rdtscp", - "test-aws-openssl-regular-4.4", + 'debug-compile-aws', + 'debug-compile-rdtscp', + 'test-aws-openssl-regular-4.4', ], - {"CC": "clang"}, + {'CC': 'clang'}, ), Variant( - "windows-2017-32", - "Windows (i686) (VS 2017)", - "windows-vsCurrent-large", - ["debug-compile-nosasl-nossl", ".latest .nossl .nosasl"], - {"CC": "Visual Studio 15 2017"}, + 'windows-2017-32', + 'Windows (i686) (VS 2017)', + 'windows-vsCurrent-large', + ['debug-compile-nosasl-nossl', '.latest .nossl .nosasl'], + {'CC': 'Visual Studio 15 2017'}, ), Variant( - "windows-2017", - "Windows (VS 2017)", - "windows-vsCurrent-large", + 'windows-2017', + 'Windows (VS 2017)', + 'windows-vsCurrent-large', [ - "release-compile", - "debug-compile-nosasl-nossl", - "debug-compile-nosasl-openssl", - "debug-compile-sspi-winssl", - "debug-compile-nosrv", - ".latest .nossl", - ".nosasl .latest .nossl", - ".compression !.snappy !.zstd !.latest", - "test-dns-winssl", - "test-dns-auth-winssl", - "debug-compile-aws", - "test-aws-openssl-regular-4.4", - "test-aws-openssl-regular-latest", + 'release-compile', + 'debug-compile-nosasl-nossl', + 'debug-compile-nosasl-openssl', + 'debug-compile-sspi-winssl', + 'debug-compile-nosrv', + '.latest .nossl', + '.nosasl .latest .nossl', + '.compression !.snappy !.zstd !.latest', + 'test-dns-winssl', + 'test-dns-auth-winssl', + 'debug-compile-aws', + 'test-aws-openssl-regular-4.4', + 
'test-aws-openssl-regular-latest', # Authentication tests with OpenSSL on Windows are only run on the vs2017 variant. # Older vs variants fail to verify certificates against Atlas tests. - ".authentication-tests .openssl !.sasl", - ".authentication-tests .winssl", + '.authentication-tests .openssl !.sasl', + '.authentication-tests .winssl', ], - {"CC": "Visual Studio 15 2017 Win64"}, + {'CC': 'Visual Studio 15 2017 Win64'}, ), Variant( - "mingw-windows2016", - "MinGW-W64 (Windows Server 2016)", - "windows-vsCurrent-large", - ["debug-compile-nosasl-nossl", ".latest .nossl .nosasl .server"], - {"CC": "gcc"}, + 'mingw-windows2016', + 'MinGW-W64 (Windows Server 2016)', + 'windows-vsCurrent-large', + ['debug-compile-nosasl-nossl', '.latest .nossl .nosasl .server'], + {'CC': 'gcc'}, ), Variant( - "rhel8-power", - "Power (ppc64le) (RHEL 8)", - "rhel8-power-large", + 'rhel8-power', + 'Power (ppc64le) (RHEL 8)', + 'rhel8-power-large', [ - "release-compile", - "debug-compile-nosasl-nossl", - "debug-compile-sasl-openssl", - ".latest .nossl", - "test-dns-openssl", + 'release-compile', + 'debug-compile-nosasl-nossl', + 'debug-compile-sasl-openssl', + '.latest .nossl', + 'test-dns-openssl', ], - {"CC": "gcc"}, + {'CC': 'gcc'}, patchable=False, batchtime=days(1), ), Variant( - "arm-ubuntu2004", - "*ARM (aarch64) (Ubuntu 20.04)", - "ubuntu2004-arm64-large", + 'arm-ubuntu2004', + '*ARM (aarch64) (Ubuntu 20.04)', + 'ubuntu2004-arm64-large', [ - ".compression !.snappy !.zstd", - "release-compile", - "debug-compile-nosasl-nossl", - "debug-compile-nosasl-openssl", - "debug-compile-sasl-openssl", - ".authentication-tests .openssl", - ".latest .nossl", - "test-dns-openssl", + '.compression !.snappy !.zstd', + 'release-compile', + 'debug-compile-nosasl-nossl', + 'debug-compile-nosasl-openssl', + 'debug-compile-sasl-openssl', + '.authentication-tests .openssl', + '.latest .nossl', + 'test-dns-openssl', ], - {"CC": "gcc"}, + {'CC': 'gcc'}, batchtime=days(1), ), Variant( - "zseries-rhel8", - 
"*zSeries", - "rhel8-zseries-large", + 'zseries-rhel8', + '*zSeries', + 'rhel8-zseries-large', [ - "release-compile", + 'release-compile', # '.compression', --> TODO: waiting on ticket CDRIVER-3258 - "debug-compile-nosasl-nossl", - "debug-compile-nosasl-openssl", - "debug-compile-sasl-openssl", - ".authentication-tests .openssl", - ".latest .nossl", + 'debug-compile-nosasl-nossl', + 'debug-compile-nosasl-openssl', + 'debug-compile-sasl-openssl', + '.authentication-tests .openssl', + '.latest .nossl', ], - {"CC": "gcc"}, + {'CC': 'gcc'}, patchable=False, batchtime=days(1), ), # Run AWS tests for MongoDB 4.4 and 5.0 on Ubuntu 20.04. AWS setup scripts # expect Ubuntu 20.04+. MongoDB 4.4 and 5.0 are not available on 22.04. Variant( - "aws-ubuntu2004", - "AWS Tests (Ubuntu 20.04)", - "ubuntu2004-small", + 'aws-ubuntu2004', + 'AWS Tests (Ubuntu 20.04)', + 'ubuntu2004-small', [ - "debug-compile-aws", - ".test-aws .4.4", - ".test-aws .5.0", + 'debug-compile-aws', + '.test-aws .4.4', + '.test-aws .5.0', ], - {"CC": "clang"}, + {'CC': 'clang'}, ), Variant( - "aws-ubuntu2204", - "AWS Tests (Ubuntu 22.04)", - "ubuntu2004-small", + 'aws-ubuntu2204', + 'AWS Tests (Ubuntu 22.04)', + 'ubuntu2004-small', [ - "debug-compile-aws", - ".test-aws .6.0", - ".test-aws .7.0", - ".test-aws .8.0", - ".test-aws .latest", + 'debug-compile-aws', + '.test-aws .6.0', + '.test-aws .7.0', + '.test-aws .8.0', + '.test-aws .latest', ], - {"CC": "clang"}, + {'CC': 'clang'}, ), Variant( - "ocsp", - "OCSP tests", - "ubuntu2004-small", + 'ocsp', + 'OCSP tests', + 'ubuntu2004-small', [ - OD([("name", "debug-compile-nosasl-openssl")]), - OD([("name", "debug-compile-nosasl-darwinssl"), ("distros", ["macos-14-arm64"])]), - OD([("name", "debug-compile-nosasl-winssl"), ("distros", ["windows-vsCurrent-large"])]), - OD([("name", ".ocsp-openssl")]), - OD([("name", ".ocsp-darwinssl"), ("distros", ["macos-14-arm64"])]), - OD([("name", ".ocsp-winssl"), ("distros", ["windows-vsCurrent-large"])]), + OD([('name', 
'debug-compile-nosasl-openssl')]), + OD([('name', 'debug-compile-nosasl-darwinssl'), ('distros', ['macos-14-arm64'])]), + OD([('name', 'debug-compile-nosasl-winssl'), ('distros', ['windows-vsCurrent-large'])]), + OD([('name', '.ocsp-openssl')]), + OD([('name', '.ocsp-darwinssl'), ('distros', ['macos-14-arm64'])]), + OD([('name', '.ocsp-winssl'), ('distros', ['windows-vsCurrent-large'])]), ], {}, batchtime=days(7), display_tasks=[ { - "name": "ocsp-openssl", - "execution_tasks": [".ocsp-openssl"], + 'name': 'ocsp-openssl', + 'execution_tasks': ['.ocsp-openssl'], }, { - "name": "ocsp-darwinssl", - "execution_tasks": [".ocsp-darwinssl"], + 'name': 'ocsp-darwinssl', + 'execution_tasks': ['.ocsp-darwinssl'], }, { - "name": "ocsp-winssl", - "execution_tasks": [".ocsp-winssl"], + 'name': 'ocsp-winssl', + 'execution_tasks': ['.ocsp-winssl'], }, ], ), Variant( - "packaging", - "Linux Distro Packaging", - "debian12-latest-small", + 'packaging', + 'Linux Distro Packaging', + 'debian12-latest-small', [ - "debian-package-build", - OD([("name", "rpm-package-build"), ("distros", ["rhel90-arm64-small"])]), + 'debian-package-build', + OD([('name', 'rpm-package-build'), ('distros', ['rhel90-arm64-small'])]), ], {}, - tags=["pr-merge-gate"], + tags=['pr-merge-gate'], ), # Test 7.0+ with Ubuntu 20.04+ since MongoDB 7.0 no longer ships binaries for Ubuntu 18.04. 
Variant( - "versioned-api-ubuntu2004", - "Versioned API Tests (Ubuntu 20.04)", - "ubuntu2004-test", + 'versioned-api-ubuntu2004', + 'Versioned API Tests (Ubuntu 20.04)', + 'ubuntu2004-test', [ - "debug-compile-nosasl-openssl", - "debug-compile-nosasl-nossl", - ".versioned-api .5.0", - ".versioned-api .6.0", - ".versioned-api .7.0", - ".versioned-api .8.0", + 'debug-compile-nosasl-openssl', + 'debug-compile-nosasl-nossl', + '.versioned-api .5.0', + '.versioned-api .6.0', + '.versioned-api .7.0', + '.versioned-api .8.0', ], {}, ), diff --git a/.evergreen/legacy_config_generator/generate-evergreen-config.py b/.evergreen/legacy_config_generator/generate-evergreen-config.py index 904581b57e6..ee68060c7eb 100644 --- a/.evergreen/legacy_config_generator/generate-evergreen-config.py +++ b/.evergreen/legacy_config_generator/generate-evergreen-config.py @@ -23,16 +23,16 @@ """ from collections import OrderedDict as OD -from os.path import dirname, join as joinpath, normpath +from os.path import dirname, normpath +from os.path import join as joinpath from evergreen_config_generator import generate - from evergreen_config_lib.functions import all_functions -from evergreen_config_lib.tasks import all_tasks -from evergreen_config_lib.variants import all_variants from evergreen_config_lib.taskgroups import all_task_groups -from evergreen_config_lib.testgcpkms import testgcpkms_generate +from evergreen_config_lib.tasks import all_tasks from evergreen_config_lib.testazurekms import testazurekms_generate +from evergreen_config_lib.testgcpkms import testgcpkms_generate +from evergreen_config_lib.variants import all_variants task_groups = list(all_task_groups) testazurekms_generate(all_tasks, all_variants, task_groups) @@ -40,13 +40,13 @@ config = OD( [ - ("functions", all_functions), - ("tasks", all_tasks), - ("task_groups", task_groups), - ("buildvariants", all_variants), + ('functions', all_functions), + ('tasks', all_tasks), + ('task_groups', task_groups), + ('buildvariants', 
all_variants), ] ) this_dir = dirname(__file__) -generated_configs_dir = normpath(joinpath(this_dir, "../generated_configs")) -generate(config, joinpath(generated_configs_dir, "legacy-config.yml")) +generated_configs_dir = normpath(joinpath(this_dir, '../generated_configs')) +generate(config, joinpath(generated_configs_dir, 'legacy-config.yml')) diff --git a/.evergreen/ocsp/ecdsa/mock-delegate-revoked.sh b/.evergreen/ocsp/ecdsa/mock-delegate-revoked.sh index 1e40fba5a7c..c2a87a10356 100755 --- a/.evergreen/ocsp/ecdsa/mock-delegate-revoked.sh +++ b/.evergreen/ocsp/ecdsa/mock-delegate-revoked.sh @@ -3,6 +3,6 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ocsp-responder.crt \ --ocsp_responder_key ocsp-responder.key \ - -p 8100 \ - -v \ - --fault revoked + -p 8100 \ + -v \ + --fault revoked diff --git a/.evergreen/ocsp/ecdsa/mock-delegate-valid.sh b/.evergreen/ocsp/ecdsa/mock-delegate-valid.sh index 5074a7ecabd..5b0d33b4f94 100755 --- a/.evergreen/ocsp/ecdsa/mock-delegate-valid.sh +++ b/.evergreen/ocsp/ecdsa/mock-delegate-valid.sh @@ -3,5 +3,5 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ocsp-responder.crt \ --ocsp_responder_key ocsp-responder.key \ - -p 8100 \ - -v + -p 8100 \ + -v diff --git a/.evergreen/ocsp/ecdsa/mock-revoked.sh b/.evergreen/ocsp/ecdsa/mock-revoked.sh index a6bf2ef025e..bff4ef1f61c 100755 --- a/.evergreen/ocsp/ecdsa/mock-revoked.sh +++ b/.evergreen/ocsp/ecdsa/mock-revoked.sh @@ -4,7 +4,6 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ca.crt \ --ocsp_responder_key ca.key \ - -p 8100 \ - -v \ - --fault revoked - + -p 8100 \ + -v \ + --fault revoked diff --git a/.evergreen/ocsp/ecdsa/mock-valid.sh b/.evergreen/ocsp/ecdsa/mock-valid.sh index c89ce9e954d..d015870e5a9 100755 --- a/.evergreen/ocsp/ecdsa/mock-valid.sh +++ b/.evergreen/ocsp/ecdsa/mock-valid.sh @@ -3,5 +3,5 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ca.crt \ --ocsp_responder_key ca.key \ - -p 
8100 \ - -v + -p 8100 \ + -v diff --git a/.evergreen/ocsp/ecdsa/rename.sh b/.evergreen/ocsp/ecdsa/rename.sh index 9c7df02758c..977f268a919 100755 --- a/.evergreen/ocsp/ecdsa/rename.sh +++ b/.evergreen/ocsp/ecdsa/rename.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash [ ! -f ecdsa-ca-ocsp.pem ] || mv ecdsa-ca-ocsp.pem ca.pem [ ! -f ecdsa-ca-ocsp.crt ] || mv ecdsa-ca-ocsp.crt ca.crt -[ ! -f ecdsa-ca-ocsp.key ] || mv ecdsa-ca-ocsp.key ca.key +[ ! -f ecdsa-ca-ocsp.key ] || mv ecdsa-ca-ocsp.key ca.key [ ! -f ecdsa-server-ocsp.pem ] || mv ecdsa-server-ocsp.pem server.pem [ ! -f ecdsa-server-ocsp-mustStaple.pem ] || mv ecdsa-server-ocsp-mustStaple.pem server-mustStaple.pem [ ! -f ecdsa-ocsp-responder.crt ] || mv ecdsa-ocsp-responder.crt ocsp-responder.crt diff --git a/.evergreen/ocsp/mock_ocsp_responder.py b/.evergreen/ocsp/mock_ocsp_responder.py index 0c64ff2a56b..a53d5396230 100644 --- a/.evergreen/ocsp/mock_ocsp_responder.py +++ b/.evergreen/ocsp/mock_ocsp_responder.py @@ -39,22 +39,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from __future__ import unicode_literals, division, absolute_import, print_function +from __future__ import absolute_import, division, print_function, unicode_literals -import logging import base64 +import enum import inspect +import logging import re -import enum import sys import textwrap -from datetime import datetime, timezone, timedelta -from typing import Callable, Tuple, Optional +from datetime import datetime, timedelta, timezone -from asn1crypto import x509, keys, core, ocsp +from asn1crypto import core, keys, ocsp, x509 from asn1crypto.ocsp import OCSPRequest, OCSPResponse +from flask import Flask, Response, request from oscrypto import asymmetric -from flask import Flask, request, Response __version__ = '0.10.2' __version_info__ = (0, 10, 2) @@ -66,6 +65,7 @@ else: byte_cls = bytes + def _pretty_message(string, *params): """ Takes a multi-line string and does the following: @@ -111,6 +111,7 @@ def _type_name(value): return cls.__name__ return '%s.%s' % (cls.__module__, cls.__name__) + def _writer(func): """ Decorator for a custom writer, but a default reader @@ -121,7 +122,6 @@ def _writer(func): class OCSPResponseBuilder(object): - _response_status = None _certificate = None _certificate_status = None @@ -184,12 +184,14 @@ def nonce(self, value): """ if not isinstance(value, byte_cls): - raise TypeError(_pretty_message( - ''' - nonce must be a byte string, not %s - ''', - _type_name(value) - )) + raise TypeError( + _pretty_message( + """ + nonce must be a byte string, not %s + """, + _type_name(value), + ) + ) self._nonce = value @@ -205,14 +207,16 @@ def certificate_issuer(self, value): if value is not None: is_oscrypto = isinstance(value, asymmetric.Certificate) if not is_oscrypto and not isinstance(value, x509.Certificate): - raise TypeError(_pretty_message( - ''' - certificate_issuer must be an instance of - asn1crypto.x509.Certificate or - oscrypto.asymmetric.Certificate, not %s - ''', - _type_name(value) - )) + raise TypeError( + _pretty_message( 
+ """ + certificate_issuer must be an instance of + asn1crypto.x509.Certificate or + oscrypto.asymmetric.Certificate, not %s + """, + _type_name(value), + ) + ) if is_oscrypto: value = value.asn1 @@ -228,12 +232,14 @@ def next_update(self, value): """ if not isinstance(value, datetime): - raise TypeError(_pretty_message( - ''' - next_update must be an instance of datetime.datetime, not %s - ''', - _type_name(value) - )) + raise TypeError( + _pretty_message( + """ + next_update must be an instance of datetime.datetime, not %s + """, + _type_name(value), + ) + ) self._next_update = value @@ -253,49 +259,49 @@ def build(self, responder_private_key=None, responder_certificate=None): An asn1crypto.ocsp.OCSPResponse object of the response """ if self._response_status != 'successful': - return ocsp.OCSPResponse({ - 'response_status': self._response_status - }) + return ocsp.OCSPResponse({'response_status': self._response_status}) is_oscrypto = isinstance(responder_private_key, asymmetric.PrivateKey) if not isinstance(responder_private_key, keys.PrivateKeyInfo) and not is_oscrypto: - raise TypeError(_pretty_message( - ''' - responder_private_key must be an instance ofthe c - asn1crypto.keys.PrivateKeyInfo or - oscrypto.asymmetric.PrivateKey, not %s - ''', - _type_name(responder_private_key) - )) + raise TypeError( + _pretty_message( + """ + responder_private_key must be an instance ofthe c + asn1crypto.keys.PrivateKeyInfo or + oscrypto.asymmetric.PrivateKey, not %s + """, + _type_name(responder_private_key), + ) + ) cert_is_oscrypto = isinstance(responder_certificate, asymmetric.Certificate) if not isinstance(responder_certificate, x509.Certificate) and not cert_is_oscrypto: - raise TypeError(_pretty_message( - ''' - responder_certificate must be an instance of - asn1crypto.x509.Certificate or - oscrypto.asymmetric.Certificate, not %s - ''', - _type_name(responder_certificate) - )) + raise TypeError( + _pretty_message( + """ + responder_certificate must be an instance of + 
asn1crypto.x509.Certificate or + oscrypto.asymmetric.Certificate, not %s + """, + _type_name(responder_certificate), + ) + ) if cert_is_oscrypto: responder_certificate = responder_certificate.asn1 if self._certificate_status_list is None: - raise ValueError(_pretty_message( - ''' - certificate_status_list must be set if the response_status is - "successful" - ''' - )) + raise ValueError( + _pretty_message( + """ + certificate_status_list must be set if the response_status is + "successful" + """ + ) + ) def _make_extension(name, value): - return { - 'extn_id': name, - 'critical': False, - 'extn_value': value - } + return {'extn_id': name, 'critical': False, 'extn_value': value} responses = [] for serial, status in self._certificate_status_list: @@ -304,9 +310,7 @@ def _make_extension(name, value): for name, value in self._response_data_extensions.items(): response_data_extensions.append(_make_extension(name, value)) if self._nonce: - response_data_extensions.append( - _make_extension('nonce', self._nonce) - ) + response_data_extensions.append(_make_extension('nonce', self._nonce)) if not response_data_extensions: response_data_extensions = None @@ -318,12 +322,7 @@ def _make_extension(name, value): single_response_extensions.append( _make_extension( 'certificate_issuer', - [ - x509.GeneralName( - name='directory_name', - value=self._certificate_issuer.subject - ) - ] + [x509.GeneralName(name='directory_name', value=self._certificate_issuer.subject)], ) ) @@ -333,15 +332,9 @@ def _make_extension(name, value): responder_key_hash = getattr(responder_certificate.public_key, self._key_hash_algo) if status == 'good': - cert_status = ocsp.CertStatus( - name='good', - value=core.Null() - ) + cert_status = ocsp.CertStatus(name='good', value=core.Null()) elif status == 'unknown': - cert_status = ocsp.CertStatus( - name='unknown', - value=core.Null() - ) + cert_status = ocsp.CertStatus(name='unknown', value=core.Null()) else: reason = status if status != 'revoked' else 
'unspecified' cert_status = ocsp.CertStatus( @@ -349,7 +342,7 @@ def _make_extension(name, value): value={ 'revocation_time': self._revocation_date, 'revocation_reason': reason, - } + }, ) issuer = self._certificate_issuer if self._certificate_issuer else responder_certificate @@ -363,27 +356,27 @@ def _make_extension(name, value): self._next_update = (self._this_update + timedelta(days=7)).replace(microsecond=0) response = { - 'cert_id': { - 'hash_algorithm': { - 'algorithm': self._key_hash_algo - }, - 'issuer_name_hash': getattr(issuer.subject, self._key_hash_algo), - 'issuer_key_hash': getattr(issuer.public_key, self._key_hash_algo), - 'serial_number': serial, - }, - 'cert_status': cert_status, - 'this_update': self._this_update, - 'next_update': self._next_update, - 'single_extensions': single_response_extensions - } + 'cert_id': { + 'hash_algorithm': {'algorithm': self._key_hash_algo}, + 'issuer_name_hash': getattr(issuer.subject, self._key_hash_algo), + 'issuer_key_hash': getattr(issuer.public_key, self._key_hash_algo), + 'serial_number': serial, + }, + 'cert_status': cert_status, + 'this_update': self._this_update, + 'next_update': self._next_update, + 'single_extensions': single_response_extensions, + } responses.append(response) - response_data = ocsp.ResponseData({ - 'responder_id': ocsp.ResponderId(name='by_key', value=responder_key_hash), - 'produced_at': produced_at, - 'responses': responses, - 'response_extensions': response_data_extensions - }) + response_data = ocsp.ResponseData( + { + 'responder_id': ocsp.ResponderId(name='by_key', value=responder_key_hash), + 'produced_at': produced_at, + 'responses': responses, + 'response_extensions': response_data_extensions, + } + ) signature_algo = responder_private_key.algorithm if signature_algo == 'ec': @@ -403,24 +396,31 @@ def _make_extension(name, value): signature_bytes = sign_func(responder_private_key, response_data.dump(), self._hash_algo) certs = None - if self._certificate_issuer and 
getattr(self._certificate_issuer.public_key, self._key_hash_algo) != responder_key_hash: + if ( + self._certificate_issuer + and getattr(self._certificate_issuer.public_key, self._key_hash_algo) != responder_key_hash + ): certs = [responder_certificate] - return ocsp.OCSPResponse({ - 'response_status': self._response_status, - 'response_bytes': { - 'response_type': 'basic_ocsp_response', - 'response': { - 'tbs_response_data': response_data, - 'signature_algorithm': {'algorithm': signature_algorithm_id}, - 'signature': signature_bytes, - 'certs': certs, - } + return ocsp.OCSPResponse( + { + 'response_status': self._response_status, + 'response_bytes': { + 'response_type': 'basic_ocsp_response', + 'response': { + 'tbs_response_data': response_data, + 'signature_algorithm': {'algorithm': signature_algorithm_id}, + 'signature': signature_bytes, + 'certs': certs, + }, + }, } - }) + ) + # Enums + class ResponseStatus(enum.Enum): successful = 'successful' malformed_request = 'malformed_request' @@ -445,14 +445,14 @@ class CertificateStatus(enum.Enum): # API endpoints -FAULT_REVOKED = "revoked" -FAULT_UNKNOWN = "unknown" +FAULT_REVOKED = 'revoked' +FAULT_UNKNOWN = 'unknown' app = Flask(__name__) -class OCSPResponder: - def __init__(self, issuer_cert: str, responder_cert: str, responder_key: str, - fault: str, next_update_seconds: int): + +class OCSPResponder: + def __init__(self, issuer_cert: str, responder_cert: str, responder_key: str, fault: str, next_update_seconds: int): """ Create a new OCSPResponder instance. 
@@ -496,8 +496,8 @@ def validate(self): return (CertificateStatus.revoked, time) elif self._fault == FAULT_UNKNOWN: return (CertificateStatus.unknown, None) - elif self._fault != None: - raise NotImplemented('Fault type could not be found') + elif self._fault is not None: + raise NotImplementedError('Fault type could not be found') return (CertificateStatus.good, time) def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: @@ -509,7 +509,7 @@ def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: request_list = tbs_request['request_list'] if len(request_list) < 1: logger.warning('Received OCSP request with no requests') - raise NotImplemented('Empty requests not supported') + raise NotImplementedError('Empty requests not supported') single_request = request_list[0] # TODO: Support more than one request req_cert = single_request['req_cert'] @@ -525,11 +525,13 @@ def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: certificate_status_list = [(serial, certificate_status.value)] # Build the response - builder = OCSPResponseBuilder(**{ - 'response_status': ResponseStatus.successful.value, - 'certificate_status_list': certificate_status_list, - 'revocation_date': revocation_date, - }) + builder = OCSPResponseBuilder( + **{ + 'response_status': ResponseStatus.successful.value, + 'certificate_status_list': certificate_status_list, + 'revocation_date': revocation_date, + } + ) # Parse extensions for extension in tbs_request['request_extensions']: @@ -552,8 +554,7 @@ def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: # usually happen, according to RFC 6960 4.1.2), we should throw our # hands up in despair and run. 
if unknown is True and critical is True: - logger.warning('Could not parse unknown critical extension: %r', - dict(extension.native)) + logger.warning('Could not parse unknown critical extension: %r', dict(extension.native)) return self._fail(ResponseStatus.internal_error) # If it's an unknown non-critical extension, we can safely ignore it. @@ -579,18 +580,28 @@ def build_http_response(self, request_der: bytes) -> Response: responder = None + def init_responder(issuer_cert: str, responder_cert: str, responder_key: str, fault: str, next_update_seconds: int): global responder - responder = OCSPResponder(issuer_cert=issuer_cert, responder_cert=responder_cert, responder_key=responder_key, fault=fault, next_update_seconds=next_update_seconds) + responder = OCSPResponder( + issuer_cert=issuer_cert, + responder_cert=responder_cert, + responder_key=responder_key, + fault=fault, + next_update_seconds=next_update_seconds, + ) + def init(port=8080, debug=False): logger.info('Launching %sserver on port %d', 'debug' if debug else '', port) app.run(port=port, debug=debug) + @app.route('/', methods=['GET']) def _handle_root(): return 'ocsp-responder' + @app.route('/status/', defaults={'u_path': ''}, methods=['GET']) @app.route('/status/', methods=['GET']) def _handle_get(u_path): @@ -599,12 +610,13 @@ def _handle_get(u_path): An OCSP GET request contains the DER-in-base64 encoded OCSP request in the HTTP request URL. """ - if "Host" not in request.headers: - raise ValueError ("Required 'Host' header not present") + if 'Host' not in request.headers: + raise ValueError("Required 'Host' header not present") der = base64.b64decode(u_path) ocsp_request = responder.parse_ocsp_request(der) return responder.build_http_response(ocsp_request) + @app.route('/status', methods=['POST']) def _handle_post(): global responder @@ -612,7 +624,7 @@ def _handle_post(): An OCSP POST request contains the DER encoded OCSP request in the HTTP request body. 
""" - if "Host" not in request.headers: - raise ValueError ("Required 'Host' header not present") + if 'Host' not in request.headers: + raise ValueError("Required 'Host' header not present") ocsp_request = responder.parse_ocsp_request(request.data) return responder.build_http_response(ocsp_request) diff --git a/.evergreen/ocsp/ocsp_mock.py b/.evergreen/ocsp/ocsp_mock.py index d89b9d45b38..8ba302c09de 100755 --- a/.evergreen/ocsp/ocsp_mock.py +++ b/.evergreen/ocsp/ocsp_mock.py @@ -6,37 +6,52 @@ import argparse import logging -import sys import os +import sys -sys.path.append(os.path.join(os.getcwd() ,'src', 'third_party', 'mock_ocsp_responder')) +sys.path.append(os.path.join(os.getcwd(), 'src', 'third_party', 'mock_ocsp_responder')) import mock_ocsp_responder + def main(): """Main entry point""" - parser = argparse.ArgumentParser(description="MongoDB Mock OCSP Responder.") + parser = argparse.ArgumentParser(description='MongoDB Mock OCSP Responder.') - parser.add_argument('-p', '--port', type=int, default=8080, help="Port to listen on") + parser.add_argument('-p', '--port', type=int, default=8080, help='Port to listen on') - parser.add_argument('--ca_file', type=str, required=True, help="CA file for OCSP responder") + parser.add_argument('--ca_file', type=str, required=True, help='CA file for OCSP responder') - parser.add_argument('-v', '--verbose', action='count', help="Enable verbose tracing") + parser.add_argument('-v', '--verbose', action='count', help='Enable verbose tracing') - parser.add_argument('--ocsp_responder_cert', type=str, required=True, help="OCSP Responder Certificate") + parser.add_argument('--ocsp_responder_cert', type=str, required=True, help='OCSP Responder Certificate') - parser.add_argument('--ocsp_responder_key', type=str, required=True, help="OCSP Responder Keyfile") + parser.add_argument('--ocsp_responder_key', type=str, required=True, help='OCSP Responder Keyfile') - parser.add_argument('--fault', 
choices=[mock_ocsp_responder.FAULT_REVOKED, mock_ocsp_responder.FAULT_UNKNOWN, None], default=None, type=str, help="Specify a specific fault to test") + parser.add_argument( + '--fault', + choices=[mock_ocsp_responder.FAULT_REVOKED, mock_ocsp_responder.FAULT_UNKNOWN, None], + default=None, + type=str, + help='Specify a specific fault to test', + ) - parser.add_argument('--next_update_seconds', type=int, default=32400, help="Specify how long the OCSP response should be valid for") + parser.add_argument( + '--next_update_seconds', type=int, default=32400, help='Specify how long the OCSP response should be valid for' + ) args = parser.parse_args() if args.verbose: logging.basicConfig(level=logging.DEBUG) print('Initializing OCSP Responder') - mock_ocsp_responder.init_responder(issuer_cert=args.ca_file, responder_cert=args.ocsp_responder_cert, responder_key=args.ocsp_responder_key, fault=args.fault, next_update_seconds=args.next_update_seconds) + mock_ocsp_responder.init_responder( + issuer_cert=args.ca_file, + responder_cert=args.ocsp_responder_cert, + responder_key=args.ocsp_responder_key, + fault=args.fault, + next_update_seconds=args.next_update_seconds, + ) if args.verbose: mock_ocsp_responder.init(args.port, debug=True) @@ -45,5 +60,6 @@ def main(): print('Mock OCSP Responder is running on port %s' % (str(args.port))) + if __name__ == '__main__': main() diff --git a/.evergreen/ocsp/rsa/mock-delegate-revoked.sh b/.evergreen/ocsp/rsa/mock-delegate-revoked.sh index adf026ce1b3..d7edc9b659b 100755 --- a/.evergreen/ocsp/rsa/mock-delegate-revoked.sh +++ b/.evergreen/ocsp/rsa/mock-delegate-revoked.sh @@ -3,6 +3,6 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ocsp_responder.crt \ --ocsp_responder_key ocsp_responder.key \ - -p 8100 \ - -v \ - --fault revoked + -p 8100 \ + -v \ + --fault revoked diff --git a/.evergreen/ocsp/rsa/mock-delegate-valid.sh b/.evergreen/ocsp/rsa/mock-delegate-valid.sh index 5074a7ecabd..5b0d33b4f94 100755 --- 
a/.evergreen/ocsp/rsa/mock-delegate-valid.sh +++ b/.evergreen/ocsp/rsa/mock-delegate-valid.sh @@ -3,5 +3,5 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ocsp-responder.crt \ --ocsp_responder_key ocsp-responder.key \ - -p 8100 \ - -v + -p 8100 \ + -v diff --git a/.evergreen/ocsp/rsa/mock-revoked.sh b/.evergreen/ocsp/rsa/mock-revoked.sh index 4a17926b922..209629fbc75 100755 --- a/.evergreen/ocsp/rsa/mock-revoked.sh +++ b/.evergreen/ocsp/rsa/mock-revoked.sh @@ -3,6 +3,6 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ca.crt \ --ocsp_responder_key ca.key \ - -p 8100 \ - -v \ - --fault revoked + -p 8100 \ + -v \ + --fault revoked diff --git a/.evergreen/ocsp/rsa/mock-valid.sh b/.evergreen/ocsp/rsa/mock-valid.sh index c89ce9e954d..d015870e5a9 100755 --- a/.evergreen/ocsp/rsa/mock-valid.sh +++ b/.evergreen/ocsp/rsa/mock-valid.sh @@ -3,5 +3,5 @@ python3 ../ocsp_mock.py \ --ca_file ca.pem \ --ocsp_responder_cert ca.crt \ --ocsp_responder_key ca.key \ - -p 8100 \ - -v + -p 8100 \ + -v diff --git a/.evergreen/scripts/build-docs.sh b/.evergreen/scripts/build-docs.sh index b6f4e579f66..2389e068718 100755 --- a/.evergreen/scripts/build-docs.sh +++ b/.evergreen/scripts/build-docs.sh @@ -6,7 +6,7 @@ set -o errexit # Exit the script with error if any of the commands fail . "$MONGOC_DIR/.evergreen/scripts/install-build-tools.sh" # Check that a CLion user didn't accidentally convert NEWS from UTF-8 to ASCII -grep "á" NEWS > /dev/null || (echo "NEWS file appears to have lost its UTF-8 encoding?" || exit 1) +grep "á" NEWS >/dev/null || (echo "NEWS file appears to have lost its UTF-8 encoding?" 
|| exit 1) build_dir=$MONGOC_DIR/_build/for-docs uvx cmake --fresh -G "Ninja" -S "$MONGOC_DIR" -B "$build_dir" \ diff --git a/.evergreen/scripts/build_snapshot_rpm.sh b/.evergreen/scripts/build_snapshot_rpm.sh index 05b7f481260..2956e15daf9 100755 --- a/.evergreen/scripts/build_snapshot_rpm.sh +++ b/.evergreen/scripts/build_snapshot_rpm.sh @@ -22,7 +22,6 @@ set -o errexit # limitations under the License. # - for arg in "$@"; do if [ "$arg" = "-h" ]; then echo "Usage: .evergreen/scripts/build_snapshot_rpm.sh" @@ -58,7 +57,7 @@ fi if [ -f "${spec_file}" ]; then echo "Found old spec file (${spec_file})...removing" - rm -f ${spec_file} + rm -f ${spec_file} fi cp "$(pwd)/.evergreen/etc/${package}.spec" .. if [ -f .evergreen/etc/spec.patch ]; then @@ -119,13 +118,17 @@ git archive --format=tar --output="$tar_filepath" --prefix="$tar_filestem/" HEAD mkdir -p "$tar_filestem" cp VERSION_CURRENT "$tar_filestem/." tar -rf "$tar_filepath" "$tar_filestem/" -gzip --keep "$tar_filepath" --stdout > "$tgz_filepath" +gzip --keep "$tar_filepath" --stdout >"$tgz_filepath" echo "Building source RPM ..." rpmbuild -bs ${spec_file} echo "Building binary RPMs ..." mock_result=$(readlink -f ../mock-result) -sudo mock --resultdir="${mock_result}" --use-bootstrap-image --isolation=simple -r ${config} --no-clean --no-cleanup-after --rebuild ~/rpmbuild/SRPMS/${package}-${snapshot_version}*.src.rpm || ( cd "${mock_result}" ; cat *.log ; exit 1 ) +sudo mock --resultdir="${mock_result}" --use-bootstrap-image --isolation=simple -r ${config} --no-clean --no-cleanup-after --rebuild ~/rpmbuild/SRPMS/${package}-${snapshot_version}*.src.rpm || ( + cd "${mock_result}" + cat *.log + exit 1 +) sudo mock -r ${config} --use-bootstrap-image --isolation=simple --copyin "${mock_result}" /tmp sudo mock -r ${config} --use-bootstrap-image --isolation=simple --cwd "/tmp/${build_dir}" --chroot -- /bin/sh -c "( @@ -140,4 +143,7 @@ if [ ! 
-e "${mock_root}/tmp/${build_dir}/example-client" ]; then fi sudo mock -r ${config} --use-bootstrap-image --isolation=simple --clean -(cd "${mock_result}" ; tar zcvf ../rpm.tar.gz *.rpm) +( + cd "${mock_result}" + tar zcvf ../rpm.tar.gz *.rpm +) diff --git a/.evergreen/scripts/check-preludes.py b/.evergreen/scripts/check-preludes.py index 3b5807f3707..92901e70f32 100644 --- a/.evergreen/scripts/check-preludes.py +++ b/.evergreen/scripts/check-preludes.py @@ -14,56 +14,56 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Check that public libbson/libmongoc headers all include the prelude line. -""" +"""Check that public libbson/libmongoc headers all include the prelude line.""" + import sys from pathlib import Path if len(sys.argv) != 2: - print("Usage: python check-preludes.py ") + print('Usage: python check-preludes.py ') sys.exit(1) -MONGOC_PREFIX = Path("src/libmongoc/src/mongoc") -BSON_PREFIX = Path("src/libbson/src/bson") -COMMON_PREFIX = Path("src/common/src") +MONGOC_PREFIX = Path('src/libmongoc/src/mongoc') +BSON_PREFIX = Path('src/libbson/src/bson') +COMMON_PREFIX = Path('src/common/src') checks = [ { - "name": "libmongoc", - "headers": list(MONGOC_PREFIX.glob("mongoc-*.h")), - "exclusions": [ - MONGOC_PREFIX / "mongoc-prelude.h", - MONGOC_PREFIX / "mongoc.h", + 'name': 'libmongoc', + 'headers': list(MONGOC_PREFIX.glob('mongoc-*.h')), + 'exclusions': [ + MONGOC_PREFIX / 'mongoc-prelude.h', + MONGOC_PREFIX / 'mongoc.h', ], - "include": "#include ", + 'include': '#include ', }, { - "name": "libbson", - "headers": list(BSON_PREFIX.glob("bson-*.h")), - "exclusions": [ - BSON_PREFIX / "bson-prelude.h", - BSON_PREFIX / "bson.h", + 'name': 'libbson', + 'headers': list(BSON_PREFIX.glob('bson-*.h')), + 'exclusions': [ + BSON_PREFIX / 'bson-prelude.h', + BSON_PREFIX / 'bson.h', ], - "include": "#include ", + 'include': '#include ', }, { - "name": "common", - "headers": list(COMMON_PREFIX.glob("*.h")), - 
"exclusions": [COMMON_PREFIX / "common-prelude.h"], - "include": "#include ", + 'name': 'common', + 'headers': list(COMMON_PREFIX.glob('*.h')), + 'exclusions': [COMMON_PREFIX / 'common-prelude.h'], + 'include': '#include ', }, ] for check in checks: - NAME = check["name"] - print(f"Checking headers for {NAME}") - assert len(check["headers"]) > 0 - for header in check["headers"]: - if header in check["exclusions"] or header.name.endswith("-private.h"): + NAME = check['name'] + print(f'Checking headers for {NAME}') + assert len(check['headers']) > 0 + for header in check['headers']: + if header in check['exclusions'] or header.name.endswith('-private.h'): continue - lines = Path(header).read_text(encoding="utf-8").splitlines() - if check["include"] not in lines: - print(f"{header} did not include prelude") + lines = Path(header).read_text(encoding='utf-8').splitlines() + if check['include'] not in lines: + print(f'{header} did not include prelude') sys.exit(1) -print("All checks passed") +print('All checks passed') diff --git a/.evergreen/scripts/check-symlink.sh b/.evergreen/scripts/check-symlink.sh index 97ca241014c..034a4c3e53e 100755 --- a/.evergreen/scripts/check-symlink.sh +++ b/.evergreen/scripts/check-symlink.sh @@ -1,5 +1,4 @@ -check_symlink() -{ +check_symlink() { SYMLINK="$INSTALL_DIR/lib/$1" EXPECTED_TARGET="$2" diff --git a/.evergreen/scripts/check_rpm_spec.sh b/.evergreen/scripts/check_rpm_spec.sh index a669842c24c..6d67c47b8ff 100755 --- a/.evergreen/scripts/check_rpm_spec.sh +++ b/.evergreen/scripts/check_rpm_spec.sh @@ -24,8 +24,7 @@ set -o errexit # Supported/used environment variables: # IS_PATCH If "true", this is an Evergreen patch build. 
- -on_exit () { +on_exit() { if [ -n "${SPEC_FILE}" ]; then rm -f "${SPEC_FILE}" fi @@ -33,11 +32,14 @@ on_exit () { trap on_exit EXIT if [ "${IS_PATCH}" = "true" ]; then - echo "This is a patch build...skipping RPM spec check" - exit + echo "This is a patch build...skipping RPM spec check" + exit fi SPEC_FILE=$(mktemp --tmpdir -u mongo-c-driver.XXXXXXXX.spec) curl --retry 5 https://src.fedoraproject.org/rpms/mongo-c-driver/raw/rawhide/f/mongo-c-driver.spec -sS --max-time 120 --fail --output "${SPEC_FILE}" -diff -q .evergreen/etc/mongo-c-driver.spec "${SPEC_FILE}" || (echo "Synchronize RPM spec from downstream to fix this failure. See instructions here: https://docs.google.com/document/d/1ItyBC7VN383zNXu3oUOQJYR7adfYI8ECjLMJ5kqA9X8/edit#heading=h.ahdrr3b5xv3"; exit 1) +diff -q .evergreen/etc/mongo-c-driver.spec "${SPEC_FILE}" || ( + echo "Synchronize RPM spec from downstream to fix this failure. See instructions here: https://docs.google.com/document/d/1ItyBC7VN383zNXu3oUOQJYR7adfYI8ECjLMJ5kqA9X8/edit#heading=h.ahdrr3b5xv3" + exit 1 +) diff --git a/.evergreen/scripts/compile-libmongocrypt.sh b/.evergreen/scripts/compile-libmongocrypt.sh index e719fafbbee..1debf6c95e5 100755 --- a/.evergreen/scripts/compile-libmongocrypt.sh +++ b/.evergreen/scripts/compile-libmongocrypt.sh @@ -1,9 +1,12 @@ #!/usr/bin/env bash compile_libmongocrypt() { - declare -r cmake_binary="${1:?"missing path to CMake binary"}"; shift - declare -r mongoc_dir="${1:?"missing path to mongoc directory"}"; shift - declare -r install_dir="${1:?"missing path to install directory"}"; shift + declare -r cmake_binary="${1:?"missing path to CMake binary"}" + shift + declare -r mongoc_dir="${1:?"missing path to mongoc directory"}" + shift + declare -r install_dir="${1:?"missing path to install directory"}" + shift # When updating libmongocrypt, also update openssl-compat-check.sh and the copy of libmongocrypt's kms-message in # `src/kms-message`. 
diff --git a/.evergreen/scripts/debian_package_build.sh b/.evergreen/scripts/debian_package_build.sh index 9c54520600e..d1bda19cac1 100755 --- a/.evergreen/scripts/debian_package_build.sh +++ b/.evergreen/scripts/debian_package_build.sh @@ -8,7 +8,7 @@ set -o errexit -on_exit () { +on_exit() { if [ -e ./unstable-chroot/debootstrap/debootstrap.log ]; then echo "Dumping debootstrap.log (64-bit)" cat ./unstable-chroot/debootstrap/debootstrap.log @@ -24,7 +24,7 @@ git config user.email "evergreen-build@example.com" git config user.name "Evergreen Build" if [ "${IS_PATCH}" = "true" ]; then - git diff HEAD > ../upstream.patch + git diff HEAD >../upstream.patch git clean -fdx git reset --hard HEAD git remote add upstream https://github.com/mongodb/mongo-c-driver @@ -36,7 +36,7 @@ if [ "${IS_PATCH}" = "true" ]; then if [ -s ../upstream.patch ]; then [ -d debian/patches ] || mkdir debian/patches mv ../upstream.patch debian/patches/ - echo upstream.patch >> debian/patches/series + echo upstream.patch >>debian/patches/series git add debian/patches/* git commit -m 'Evergreen patch build - upstream changes' git log -n1 -p @@ -46,7 +46,7 @@ fi cd .. git clone https://salsa.debian.org/installer-team/debootstrap.git debootstrap.git -export DEBOOTSTRAP_DIR=`pwd`/debootstrap.git +export DEBOOTSTRAP_DIR=$(pwd)/debootstrap.git sudo -E ./debootstrap.git/debootstrap --variant=buildd unstable ./unstable-chroot/ http://cdn-aws.deb.debian.org/debian cp -a mongoc ./unstable-chroot/tmp/ sudo chroot ./unstable-chroot /bin/bash -c '(\ @@ -68,8 +68,14 @@ sudo chroot ./unstable-chroot /bin/bash -c '(\ dpkg -i ../*.deb && \ gcc $(pkgconf --cflags bson2 mongoc2) -o example-client src/libmongoc/examples/example-client.c -lmongoc2 -lbson2 )' -[ -e ./unstable-chroot/tmp/mongoc/example-client ] || (echo "Example was not built!" 
; exit 1) -(cd ./unstable-chroot/tmp/ ; tar zcvf ../../deb.tar.gz *.dsc *.orig.tar.gz *.debian.tar.xz *.build *.deb) +[ -e ./unstable-chroot/tmp/mongoc/example-client ] || ( + echo "Example was not built!" + exit 1 +) +( + cd ./unstable-chroot/tmp/ + tar zcvf ../../deb.tar.gz *.dsc *.orig.tar.gz *.debian.tar.xz *.build *.deb +) # Build a second time, to ensure a "double build" works sudo chroot ./unstable-chroot /bin/bash -c "(\ @@ -100,8 +106,14 @@ sudo chroot ./unstable-i386-chroot /bin/bash -c '(\ dpkg -i ../*.deb && \ gcc $(pkgconf --cflags bson2 mongoc2) -o example-client src/libmongoc/examples/example-client.c -lmongoc2 -lbson2 )' -[ -e ./unstable-i386-chroot/tmp/mongoc/example-client ] || (echo "Example was not built!" ; exit 1) -(cd ./unstable-i386-chroot/tmp/ ; tar zcvf ../../deb-i386.tar.gz *.dsc *.orig.tar.gz *.debian.tar.xz *.build *.deb) +[ -e ./unstable-i386-chroot/tmp/mongoc/example-client ] || ( + echo "Example was not built!" + exit 1 +) +( + cd ./unstable-i386-chroot/tmp/ + tar zcvf ../../deb-i386.tar.gz *.dsc *.orig.tar.gz *.debian.tar.xz *.build *.deb +) # Build a second time, to ensure a "double build" works sudo chroot ./unstable-i386-chroot /bin/bash -c "(\ diff --git a/.evergreen/scripts/debug-core-evergreen.sh b/.evergreen/scripts/debug-core-evergreen.sh index de0be7c542a..3b4b1336108 100755 --- a/.evergreen/scripts/debug-core-evergreen.sh +++ b/.evergreen/scripts/debug-core-evergreen.sh @@ -1,21 +1,21 @@ #!/usr/bin/env bash if [[ "${OSTYPE}" == "cygwin" ]]; then - echo "Skipping debug-core-evergreen.sh" - exit + echo "Skipping debug-core-evergreen.sh" + exit fi shopt -s nullglob for i in *.core; do - echo $i - echo "backtrace full" | gdb -q ./cmake-build/src/libmongoc/test-libmongoc $i + echo $i + echo "backtrace full" | gdb -q ./cmake-build/src/libmongoc/test-libmongoc $i done # If there is still a test-libmongoc process running (perhaps due to # deadlock, or very slow test) attach a debugger and print stacks. 
TEST_LIBMONGOC_PID="$(pgrep test-libmongoc)" if [ -n "$TEST_LIBMONGOC_PID" ]; then - echo "test-libmongoc processes still running with PID=$TEST_LIBMONGOC_PID" - echo "backtrace full" | gdb -q -p $TEST_LIBMONGOC_PID - kill $TEST_LIBMONGOC_PID + echo "test-libmongoc processes still running with PID=$TEST_LIBMONGOC_PID" + echo "backtrace full" | gdb -q -p $TEST_LIBMONGOC_PID + kill $TEST_LIBMONGOC_PID fi diff --git a/.evergreen/scripts/integration-tests.sh b/.evergreen/scripts/integration-tests.sh index 4ea2d5effb3..16b307ef2d9 100755 --- a/.evergreen/scripts/integration-tests.sh +++ b/.evergreen/scripts/integration-tests.sh @@ -16,7 +16,7 @@ # This script may be run locally. # -set -o errexit # Exit the script with error if any of the commands fail +set -o errexit # Exit the script with error if any of the commands fail # shellcheck source=.evergreen/scripts/env-var-utils.sh . "$(dirname "${BASH_SOURCE[0]:?}")/env-var-utils.sh" @@ -37,7 +37,7 @@ script_dir="$(to_absolute "$(dirname "${BASH_SOURCE[0]:?}")")" export DRIVERS_TOOLS DRIVERS_TOOLS="$(cd ../drivers-evergreen-tools && pwd)" # ./mongoc -> ./drivers-evergreen-tools if [[ "${OSTYPE:?}" == cygwin ]]; then - DRIVERS_TOOLS="$(cygpath -m "${DRIVERS_TOOLS:?}")" + DRIVERS_TOOLS="$(cygpath -m "${DRIVERS_TOOLS:?}")" fi export MONGO_ORCHESTRATION_HOME="${DRIVERS_TOOLS:?}/.evergreen/orchestration" @@ -46,12 +46,12 @@ export PATH="${MONGODB_BINARIES:?}:$PATH" # Workaround absence of `tls=true` URI in the `mongodb_auth_uri` field returned by mongo orchestration. if [[ -n "${REQUIRE_API_VERSION:-}" && "${SSL:?}" != nossl ]]; then - prev='$MONGODB_BINARIES/mongosh $URI $MONGO_ORCHESTRATION_HOME/require-api-version.js' + prev='$MONGODB_BINARIES/mongosh $URI $MONGO_ORCHESTRATION_HOME/require-api-version.js' - # Use `--tlsAllowInvalidCertificates` to avoid self-signed certificate errors. 
- next='$MONGODB_BINARIES/mongosh --tls --tlsAllowInvalidCertificates $URI $MONGO_ORCHESTRATION_HOME/require-api-version.js' + # Use `--tlsAllowInvalidCertificates` to avoid self-signed certificate errors. + next='$MONGODB_BINARIES/mongosh --tls --tlsAllowInvalidCertificates $URI $MONGO_ORCHESTRATION_HOME/require-api-version.js' - sed -i -e "s|${prev:?}|${next:?}|" "${DRIVERS_TOOLS:?}/.evergreen/run-orchestration.sh" + sed -i -e "s|${prev:?}|${next:?}|" "${DRIVERS_TOOLS:?}/.evergreen/run-orchestration.sh" fi "${DRIVERS_TOOLS:?}/.evergreen/run-orchestration.sh" @@ -60,19 +60,19 @@ echo "Waiting for mongo-orchestration to start..." wait_for_mongo_orchestration() { declare port="${1:?"wait_for_mongo_orchestration requires a server port"}" - for _ in $(seq 300); do - # Exit code 7: "Failed to connect to host". - if - curl -s --max-time 1 "localhost:${port:?}" >/dev/null - test $? -ne 7 - then - return 0 - else - sleep 1 - fi - done - echo "Could not detect mongo-orchestration on port ${port:?}" - return 1 + for _ in $(seq 300); do + # Exit code 7: "Failed to connect to host". + if + curl -s --max-time 1 "localhost:${port:?}" >/dev/null + test $? -ne 7 + then + return 0 + else + sleep 1 + fi + done + echo "Could not detect mongo-orchestration on port ${port:?}" + return 1 } wait_for_mongo_orchestration 8889 echo "Waiting for mongo-orchestration to start... done." 
diff --git a/.evergreen/scripts/kms-divergence-check.sh b/.evergreen/scripts/kms-divergence-check.sh index 46f4adaafc4..e2a940ed230 100755 --- a/.evergreen/scripts/kms-divergence-check.sh +++ b/.evergreen/scripts/kms-divergence-check.sh @@ -16,9 +16,9 @@ LIBMONGOCRYPT_DIR="$MONGOC_DIR/libmongocrypt-for-kms-divergence-check" LIBMONGOCRYPT_GITREF="34a9572c416e0827a1fa988baf88411c4b5f2c7b" cleanup() { - if [ -d "$LIBMONGOCRYPT_DIR" ]; then - rm -rf "$LIBMONGOCRYPT_DIR" - fi + if [ -d "$LIBMONGOCRYPT_DIR" ]; then + rm -rf "$LIBMONGOCRYPT_DIR" + fi } cleanup @@ -27,9 +27,9 @@ trap cleanup EXIT git clone -q https://github.com/mongodb/libmongocrypt "$LIBMONGOCRYPT_DIR" cd "$LIBMONGOCRYPT_DIR" git checkout "$LIBMONGOCRYPT_GITREF" --quiet -if ! diff -uNr "$LIBMONGOCRYPT_DIR/kms-message/" "$MONGOC_DIR/src/kms-message/" ; then - echo "Unexpected differences found in KMS sources!" - exit 1 +if ! diff -uNr "$LIBMONGOCRYPT_DIR/kms-message/" "$MONGOC_DIR/src/kms-message/"; then + echo "Unexpected differences found in KMS sources!" + exit 1 else - echo "No changes detected from KMS message at commit $LIBMONGOCRYPT_GITREF" + echo "No changes detected from KMS message at commit $LIBMONGOCRYPT_GITREF" fi diff --git a/.evergreen/scripts/link-sample-program-bson.sh b/.evergreen/scripts/link-sample-program-bson.sh index 2ad2f73e112..a0bdda873a2 100755 --- a/.evergreen/scripts/link-sample-program-bson.sh +++ b/.evergreen/scripts/link-sample-program-bson.sh @@ -1,11 +1,10 @@ #!/usr/bin/env bash -set -o errexit # Exit the script with error if any of the commands fail +set -o errexit # Exit the script with error if any of the commands fail # Supported/used environment variables: # LINK_STATIC Whether to statically link to libbson # BUILD_SAMPLE_WITH_CMAKE Link program w/ CMake. Default: use pkg-config. - echo "LINK_STATIC=$LINK_STATIC BUILD_SAMPLE_WITH_CMAKE=$BUILD_SAMPLE_WITH_CMAKE" DIR=$(dirname $0) @@ -16,8 +15,8 @@ export CMAKE_GENERATOR="Ninja" # The major version of the project. 
Appears in certain install filenames. _full_version=$(cat "$DIR/../../VERSION_CURRENT") -version="${_full_version%-*}" # 1.2.3-dev → 1.2.3 -major="${version%%.*}" # 1.2.3 → 1 +version="${_full_version%-*}" # 1.2.3-dev → 1.2.3 +major="${version%%.*}" # 1.2.3 → 1 echo "major version: $major" echo " full version: $version" @@ -31,7 +30,7 @@ else LDD=ldd fi -SRCROOT=`pwd` +SRCROOT=$(pwd) SCRATCH_DIR=$(pwd)/.scratch rm -rf "$SCRATCH_DIR" mkdir -p "$SCRATCH_DIR" diff --git a/.evergreen/scripts/link-sample-program.sh b/.evergreen/scripts/link-sample-program.sh index 15725d5d193..8c600cf9cd5 100755 --- a/.evergreen/scripts/link-sample-program.sh +++ b/.evergreen/scripts/link-sample-program.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -set -o errexit # Exit the script with error if any of the commands fail +set -o errexit # Exit the script with error if any of the commands fail # Supported/used environment variables: # LINK_STATIC Whether to statically link to libmongoc @@ -8,7 +8,6 @@ set -o errexit # Exit the script with error if any of the commands fail # ENABLE_SNAPPY Set -DENABLE_SNAPPY # CMAKE Path to cmake executable. - echo "LINK_STATIC=$LINK_STATIC BUILD_SAMPLE_WITH_CMAKE=$BUILD_SAMPLE_WITH_CMAKE" : "${UV_INSTALL_DIR:?}" @@ -21,8 +20,8 @@ export CMAKE_GENERATOR="Ninja" # The major version of the project. Appears in certain install filenames. 
_full_version=$(cat "$DIR/../../VERSION_CURRENT") -version="${_full_version%-*}" # 1.2.3-dev → 1.2.3 -major="${version%%.*}" # 1.2.3 → 1 +version="${_full_version%-*}" # 1.2.3-dev → 1.2.3 +major="${version%%.*}" # 1.2.3 → 1 echo "major version: $major" echo " full version: $version" @@ -36,7 +35,7 @@ else LDD=ldd fi -SRCROOT=`pwd` +SRCROOT=$(pwd) SCRATCH_DIR=$(pwd)/.scratch rm -rf "$SCRATCH_DIR" mkdir -p "$SCRATCH_DIR" @@ -70,15 +69,14 @@ fi if [ "$ENABLE_SSL" ]; then if [ "$OS" = "darwin" ]; then - SSL_CMAKE_OPTION="-DENABLE_SSL:BOOL=DARWIN" + SSL_CMAKE_OPTION="-DENABLE_SSL:BOOL=DARWIN" else - SSL_CMAKE_OPTION="-DENABLE_SSL:BOOL=OPENSSL" + SSL_CMAKE_OPTION="-DENABLE_SSL:BOOL=OPENSSL" fi else SSL_CMAKE_OPTION="-DENABLE_SSL:BOOL=OFF" fi - if [ "$LINK_STATIC" ]; then STATIC_CMAKE_OPTION="-DENABLE_STATIC=ON -DENABLE_TESTS=ON" else diff --git a/.evergreen/scripts/man-pages-to-html.sh b/.evergreen/scripts/man-pages-to-html.sh index e7e5b5fdf80..df6d7ea8ac3 100755 --- a/.evergreen/scripts/man-pages-to-html.sh +++ b/.evergreen/scripts/man-pages-to-html.sh @@ -38,7 +38,7 @@ echo "
    "
     
     for doc in $2/*.3; do
    -  fullpath=`pwd`/$doc
    +  fullpath=$(pwd)/$doc
       name=$(basename $doc)
     
       if [ ! -e "$fullpath" ]; then
    diff --git a/.evergreen/scripts/openssl-compat-setup.sh b/.evergreen/scripts/openssl-compat-setup.sh
    index f009102241e..76cde08ae7e 100755
    --- a/.evergreen/scripts/openssl-compat-setup.sh
    +++ b/.evergreen/scripts/openssl-compat-setup.sh
    @@ -25,14 +25,14 @@ openssl_install_dir="${mongoc_dir:?}/openssl-install-dir"
     openssl_download "${OPENSSL_VERSION:?}"
     
     rm -rf "${openssl_install_dir:?}"
    -mkdir  "${openssl_install_dir:?}" # For openssl.cnf.
    +mkdir "${openssl_install_dir:?}" # For openssl.cnf.
     
     declare -a config_flags=(
       "--prefix=${openssl_install_dir:?}"
       "--openssldir=${openssl_install_dir:?}/ssl"
       "--libdir=lib"
    -  "shared" # Enable shared libraries.
    -  "-fPIC" # For static libraries.
    +  "shared"                              # Enable shared libraries.
    +  "-fPIC"                               # For static libraries.
       "-Wl,-rpath,${openssl_install_dir:?}" # For shared libraries.
     )
     
    @@ -79,9 +79,9 @@ echo "Building and installing OpenSSL ${OPENSSL_VERSION:?}..."
           PATH="${openssl_install_dir:?}/bin:${PATH:-}" \
           LD_LIBRARY_PATH="${openssl_install_dir:?}/lib:${LD_LIBRARY_PATH:-}" \
           openssl fipsinstall \
    -        -out "${openssl_install_dir:?}/ssl/fipsmodule.cnf" \
    -        -module "${openssl_install_dir:?}/lib/ossl-modules/fips.so" \
    -        -quiet
    +      -out "${openssl_install_dir:?}/ssl/fipsmodule.cnf" \
    +      -module "${openssl_install_dir:?}/lib/ossl-modules/fips.so" \
    +      -quiet
     
         # Verification.
         echo "Verifying OpenSSL FIPS 3.0 module is enabled..."
    diff --git a/.evergreen/scripts/patch-uv-installer.sh b/.evergreen/scripts/patch-uv-installer.sh
    index 68654010d83..046263b3e26 100755
    --- a/.evergreen/scripts/patch-uv-installer.sh
    +++ b/.evergreen/scripts/patch-uv-installer.sh
    @@ -13,71 +13,71 @@ download_checksums() {
     
     # Patches the specified uv-installer.sh script with checksums.
     patch_uv_installer() {
    -    declare script version
    -    script="${1:?"usage: patch_uv_installer  "}"
    -    version="${2:?"usage: patch_uv_installer  "}"
    +  declare script version
    +  script="${1:?"usage: patch_uv_installer  "}"
    +  version="${2:?"usage: patch_uv_installer  "}"
     
    -    [[ -f "${script:?}" ]] || {
    -        echo "${script:?} does not exist?"
    -        return 1
    -    } >&2
    +  [[ -f "${script:?}" ]] || {
    +    echo "${script:?} does not exist?"
    +    return 1
    +  } >&2
     
    -    command -v perl >/dev/null || return
    +  command -v perl >/dev/null || return
     
    -    # Ensure the uv-installer.sh script's version matches the expected version.
    -    app_version="$(perl -lne 'print $1 if m|APP_VERSION="([^"]+)"|' "${script:?}")" || return
    +  # Ensure the uv-installer.sh script's version matches the expected version.
    +  app_version="$(perl -lne 'print $1 if m|APP_VERSION="([^"]+)"|' "${script:?}")" || return
     
    -    [[ "${app_version:?}" == "${version:?}" ]] || {
    -        echo "${script:?} version ${app_version:?} does not match expected version ${version:?}"
    -        return 1
    -    } >&2
    +  [[ "${app_version:?}" == "${version:?}" ]] || {
    +    echo "${script:?} version ${app_version:?} does not match expected version ${version:?}"
    +    return 1
    +  } >&2
     
    -    # The output of the `download_checksums` helper function.
    -    checksums=(
    -        c3eddc0e314abb8588f1cdf312f0b060d79e1906eff8f43b64a05ff5e2727872  uv-aarch64-apple-darwin.tar.gz
    -        5b3a80d385d26fb9f63579a0712d020ec413ada38a6900e88fdfd41b58795b7e *uv-aarch64-pc-windows-msvc.zip
    -        3bb77b764618f65a969da063d1c4a507d8de5360ca2858f771cab109fa879a4d  uv-aarch64-unknown-linux-gnu.tar.gz
    -        40ba6e62de35820e8460eacee2b5b8f4add70a834d3859f7a60cdfc6b19ab599  uv-aarch64-unknown-linux-musl.tar.gz
    -        f108a49a17b0700d7121b0215575f96c46a203774ed80ef40544005d7af74a67  uv-arm-unknown-linux-musleabihf.tar.gz
    -        730d8ef57f221ecc572d47b227ecbd8261be08157efb351311f7bc1f6c1c944a  uv-armv7-unknown-linux-gnueabihf.tar.gz
    -        b78dacab7c2fb352301d8997c0c705c3959a4e44d7b3afe670aee2397a2c9ab3  uv-armv7-unknown-linux-musleabihf.tar.gz
    -        08482edef8b077e12e73f76e6b4bb0300c054b8009cfac5cc354297f47d24623 *uv-i686-pc-windows-msvc.zip
    -        0ce384911d4af9007576ceba2557c5d474a953ced34602ee4e09bd888cee13c0  uv-i686-unknown-linux-gnu.tar.gz
    -        b6462dc8190c7a1eafa74287d8ff213764baa49e098aeeb522fa479d29e1c0bf  uv-i686-unknown-linux-musl.tar.gz
    -        9a8e8a8927df9fa39af79214ab1acfc227dba9d9e690a424cef1dc17296161a8  uv-powerpc64-unknown-linux-gnu.tar.gz
    -        4880a8e2ba5086e7ed4bd3aecfdae5e353da569ddaac02cd3db598b4c8e77193  uv-powerpc64le-unknown-linux-gnu.tar.gz
    -        0cd68055cedbc5b1194e7e7ab2b35ac7aa1d835c586fb3778c7acb0e8a8ac822  uv-riscv64gc-unknown-linux-gnu.tar.gz
    -        8cc2e70bee35c9e437c2308f130b79acc0d7c43e710296990ed76e702e220912  uv-s390x-unknown-linux-gnu.tar.gz
    -        b799253441726351bc60c2e91254a821001e5e2e22a0e2b077d8983f583e8139  uv-x86_64-apple-darwin.tar.gz
    -        60870fa18d438737088e533ed06617549e42531c522cc9a8fe4455d8e745dc29 *uv-x86_64-pc-windows-msvc.zip
    -        8ca3db7b2a3199171cfc0870be1f819cb853ddcec29a5fa28dae30278922b7ba  uv-x86_64-unknown-linux-gnu.tar.gz
    -        38ade73396b48fce89d9d1cb8a7e8f02b6e18a2d87467525ee8fb7e09899f70d  uv-x86_64-unknown-linux-musl.tar.gz
    -    )
    +  # The output of the `download_checksums` helper function.
    +  checksums=(
    +    c3eddc0e314abb8588f1cdf312f0b060d79e1906eff8f43b64a05ff5e2727872 uv-aarch64-apple-darwin.tar.gz
    +    5b3a80d385d26fb9f63579a0712d020ec413ada38a6900e88fdfd41b58795b7e *uv-aarch64-pc-windows-msvc.zip
    +    3bb77b764618f65a969da063d1c4a507d8de5360ca2858f771cab109fa879a4d uv-aarch64-unknown-linux-gnu.tar.gz
    +    40ba6e62de35820e8460eacee2b5b8f4add70a834d3859f7a60cdfc6b19ab599 uv-aarch64-unknown-linux-musl.tar.gz
    +    f108a49a17b0700d7121b0215575f96c46a203774ed80ef40544005d7af74a67 uv-arm-unknown-linux-musleabihf.tar.gz
    +    730d8ef57f221ecc572d47b227ecbd8261be08157efb351311f7bc1f6c1c944a uv-armv7-unknown-linux-gnueabihf.tar.gz
    +    b78dacab7c2fb352301d8997c0c705c3959a4e44d7b3afe670aee2397a2c9ab3 uv-armv7-unknown-linux-musleabihf.tar.gz
    +    08482edef8b077e12e73f76e6b4bb0300c054b8009cfac5cc354297f47d24623 *uv-i686-pc-windows-msvc.zip
    +    0ce384911d4af9007576ceba2557c5d474a953ced34602ee4e09bd888cee13c0 uv-i686-unknown-linux-gnu.tar.gz
    +    b6462dc8190c7a1eafa74287d8ff213764baa49e098aeeb522fa479d29e1c0bf uv-i686-unknown-linux-musl.tar.gz
    +    9a8e8a8927df9fa39af79214ab1acfc227dba9d9e690a424cef1dc17296161a8 uv-powerpc64-unknown-linux-gnu.tar.gz
    +    4880a8e2ba5086e7ed4bd3aecfdae5e353da569ddaac02cd3db598b4c8e77193 uv-powerpc64le-unknown-linux-gnu.tar.gz
    +    0cd68055cedbc5b1194e7e7ab2b35ac7aa1d835c586fb3778c7acb0e8a8ac822 uv-riscv64gc-unknown-linux-gnu.tar.gz
    +    8cc2e70bee35c9e437c2308f130b79acc0d7c43e710296990ed76e702e220912 uv-s390x-unknown-linux-gnu.tar.gz
    +    b799253441726351bc60c2e91254a821001e5e2e22a0e2b077d8983f583e8139 uv-x86_64-apple-darwin.tar.gz
    +    60870fa18d438737088e533ed06617549e42531c522cc9a8fe4455d8e745dc29 *uv-x86_64-pc-windows-msvc.zip
    +    8ca3db7b2a3199171cfc0870be1f819cb853ddcec29a5fa28dae30278922b7ba uv-x86_64-unknown-linux-gnu.tar.gz
    +    38ade73396b48fce89d9d1cb8a7e8f02b6e18a2d87467525ee8fb7e09899f70d uv-x86_64-unknown-linux-musl.tar.gz
    +  )
     
    -    # Substitution:
    -    #     local _checksum_value
    -    # ->
    -    #     local _checksum_value="sha256"
    -    perl -i'' -lpe "s|local _checksum_style$|local _checksum_style=\"sha256\"|" "${script:?}" || return
    +  # Substitution:
    +  #     local _checksum_style
    +  # ->
    +  #     local _checksum_style="sha256"
    +  perl -i'' -lpe "s|local _checksum_style$|local _checksum_style=\"sha256\"|" "${script:?}" || return
     
    -    # Substitution (for each checksum + artifact in the checksums array):
    -    #     case "$_artifact_name" in
    -    #         ...
    -    #         "")
    -    #         ...
    -    #     esac
    -    # ->
    -    #     case "$_artifact_name" in
    -    #         ...
    -    #         "") _checksum_value=""
    -    #         ...
    -    #     esac
    -    for ((i=0; i<"${#checksums[@]}"; i+=2)); do
    -        declare checksum artifact
    -        checksum="${checksums[i]:?}"
    -        artifact="${checksums[i+1]:?}"
    +  # Substitution (for each checksum + artifact in the checksums array):
    +  #     case "$_artifact_name" in
    +  #         ...
    +  #         "")
    +  #         ...
    +  #     esac
    +  # ->
    +  #     case "$_artifact_name" in
    +  #         ...
    +  #         "") _checksum_value=""
    +  #         ...
    +  #     esac
    +  for ((i = 0; i < "${#checksums[@]}"; i += 2)); do
    +    declare checksum artifact
    +    checksum="${checksums[i]:?}"
    +    artifact="${checksums[i + 1]:?}"
     
    -        [[ "${artifact:?}" =~ ^\* ]] && artifact="${artifact:1}"
    -        perl -i'' -lpe "s|(\"${artifact:?}\"\))|\$1 _checksum_value=\"${checksum:?}\"|" "${script:?}" || return
    -    done
    +    [[ "${artifact:?}" =~ ^\* ]] && artifact="${artifact:1}"
    +    perl -i'' -lpe "s|(\"${artifact:?}\"\))|\$1 _checksum_value=\"${checksum:?}\"|" "${script:?}" || return
    +  done
     }
    diff --git a/.evergreen/scripts/run-auth-tests.sh b/.evergreen/scripts/run-auth-tests.sh
    index d7e25c7a4ee..7cd4de5a36b 100755
    --- a/.evergreen/scripts/run-auth-tests.sh
    +++ b/.evergreen/scripts/run-auth-tests.sh
    @@ -27,36 +27,36 @@ chmod 700 "${secrets_dir:?}"
     
     # Create certificate to test X509 auth with Atlas on cloud-prod:
     atlas_x509_path="${secrets_dir:?}/atlas_x509.pem"
    -echo "${atlas_x509_cert_base64:?}" | base64 --decode > "${secrets_dir:?}/atlas_x509.pem"
    +echo "${atlas_x509_cert_base64:?}" | base64 --decode >"${secrets_dir:?}/atlas_x509.pem"
     # Fix path on Windows:
     if $IS_WINDOWS; then
    -    atlas_x509_path="$(cygpath -m "${atlas_x509_path:?}")"
    +  atlas_x509_path="$(cygpath -m "${atlas_x509_path:?}")"
     fi
     
     # Create certificate to test X509 auth with Atlas on cloud-dev
     atlas_x509_dev_path="${secrets_dir:?}/atlas_x509_dev.pem"
    -echo "${atlas_x509_dev_cert_base64:?}" | base64 --decode > "${atlas_x509_dev_path:?}"
    +echo "${atlas_x509_dev_cert_base64:?}" | base64 --decode >"${atlas_x509_dev_path:?}"
     # Fix path on Windows:
     if $IS_WINDOWS; then
    -    atlas_x509_dev_path="$(cygpath -m "${atlas_x509_dev_path}")"
    +  atlas_x509_dev_path="$(cygpath -m "${atlas_x509_dev_path}")"
     fi
     
     # Create Kerberos config and keytab files.
     echo "Setting up Kerberos ... begin"
     if command -v kinit >/dev/null; then
    -    # Copy host config and append realm:
    -    if [ -e /etc/krb5.conf ]; then
    -      cat /etc/krb5.conf > "${secrets_dir:?}/krb5.conf"
    -    fi
    -    cat "${mongoc_dir:?}/.evergreen/etc/kerberos.realm" >> "${secrets_dir:?}/krb5.conf"
    -    # Set up keytab:
    -    echo "${keytab:?}" | base64 --decode > "${secrets_dir:?}/drivers.keytab"
    -    # Initialize kerberos:
    -    KRB5_CONFIG="${secrets_dir:?}/krb5.conf" kinit -k -t "${secrets_dir:?}/drivers.keytab" -p drivers@LDAPTEST.10GEN.CC
    -    echo "Setting up Kerberos ... done"
    +  # Copy host config and append realm:
    +  if [ -e /etc/krb5.conf ]; then
    +    cat /etc/krb5.conf >"${secrets_dir:?}/krb5.conf"
    +  fi
    +  cat "${mongoc_dir:?}/.evergreen/etc/kerberos.realm" >>"${secrets_dir:?}/krb5.conf"
    +  # Set up keytab:
    +  echo "${keytab:?}" | base64 --decode >"${secrets_dir:?}/drivers.keytab"
    +  # Initialize kerberos:
    +  KRB5_CONFIG="${secrets_dir:?}/krb5.conf" kinit -k -t "${secrets_dir:?}/drivers.keytab" -p drivers@LDAPTEST.10GEN.CC
    +  echo "Setting up Kerberos ... done"
     else
    -    echo "No 'kinit' detected"
    -    echo "Setting up Kerberos ... skipping"
    +  echo "No 'kinit' detected"
    +  echo "Setting up Kerberos ... skipping"
     fi
     
     declare c_timeout="connectTimeoutMS=30000&serverSelectionTryOnce=false"
    diff --git a/.evergreen/scripts/run-aws-tests.sh b/.evergreen/scripts/run-aws-tests.sh
    index a77300fe36b..82423f99bd3 100755
    --- a/.evergreen/scripts/run-aws-tests.sh
    +++ b/.evergreen/scripts/run-aws-tests.sh
    @@ -8,7 +8,6 @@
     # Example:
     # TESTCASE=EC2 run-aws-tests.sh
     
    -
     set -o errexit
     set -o pipefail
     
    @@ -51,7 +50,6 @@ expect_failure() {
       "${test_awsauth:?}" "${1:?}" "EXPECT_FAILURE" || exit
     }
     
    -
     if [[ "${TESTCASE:?}" == "REGULAR" ]]; then
       echo "===== Testing regular auth via URI ====="
     
    @@ -148,29 +146,29 @@ if [[ "${TESTCASE:?}" == "ASSUME_ROLE_WITH_WEB_IDENTITY" ]]; then
     
       echo "Valid credentials via Web Identity - should succeed"
       AWS_ROLE_ARN="${AWS_ROLE_ARN:?}" \
    -  AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
    +    AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
         expect_success "mongodb://localhost/?authMechanism=MONGODB-AWS"
     
       echo "Valid credentials via Web Identity with session name - should succeed"
       AWS_ROLE_ARN="${AWS_ROLE_ARN:?}" \
    -  AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
    -  AWS_ROLE_SESSION_NAME=test \
    +    AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
    +    AWS_ROLE_SESSION_NAME=test \
         expect_success "mongodb://localhost/?authMechanism=MONGODB-AWS"
     
       echo "Invalid AWS_ROLE_ARN via Web Identity with session name - should fail"
       AWS_ROLE_ARN="invalid_role_arn" \
    -  AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
    +    AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
         expect_failure "mongodb://localhost/?authMechanism=MONGODB-AWS"
     
       echo "Invalid AWS_WEB_IDENTITY_TOKEN_FILE via Web Identity with session name - should fail"
       AWS_ROLE_ARN="${AWS_ROLE_ARN:?}" \
    -  AWS_WEB_IDENTITY_TOKEN_FILE="/invalid/path" \
    +    AWS_WEB_IDENTITY_TOKEN_FILE="/invalid/path" \
         expect_failure "mongodb://localhost/?authMechanism=MONGODB-AWS"
     
       echo "Invalid AWS_ROLE_SESSION_NAME via Web Identity with session name - should fail"
       AWS_ROLE_ARN="${AWS_ROLE_ARN:?}" \
    -  AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
    -  AWS_ROLE_SESSION_NAME="contains_invalid_character_^" \
    +    AWS_WEB_IDENTITY_TOKEN_FILE="${AWS_WEB_IDENTITY_TOKEN_FILE:?}" \
    +    AWS_ROLE_SESSION_NAME="contains_invalid_character_^" \
         expect_failure "mongodb://localhost/?authMechanism=MONGODB-AWS"
       exit
     fi
    diff --git a/.evergreen/scripts/run-ocsp-test.sh b/.evergreen/scripts/run-ocsp-test.sh
    index 7396170c7af..2a5cb29600b 100755
    --- a/.evergreen/scripts/run-ocsp-test.sh
    +++ b/.evergreen/scripts/run-ocsp-test.sh
    @@ -90,14 +90,14 @@ command -V "${mongoc_ping:?}"
     # Custom OpenSSL library may be installed. Only prepend to LD_LIBRARY_PATH when
     # necessary to avoid conflicting with system binary requirements.
     if [[ -d "${openssl_install_dir:?}" ]]; then
    -    if [[ -d "${openssl_install_dir:?}/lib64" ]]; then
    -        LD_LIBRARY_PATH="${openssl_install_dir:?}/lib64:${LD_LIBRARY_PATH:-}"
    -        DYLD_LIBRARY_PATH="${openssl_install_dir:?}/lib64:${DYLD_LIBRARY_PATH:-}"
    -    else
    -        LD_LIBRARY_PATH="${openssl_install_dir:?}/lib:${LD_LIBRARY_PATH:-}"
    -        DYLD_LIBRARY_PATH="${openssl_install_dir:?}/lib:${DYLD_LIBRARY_PATH:-}"
    -    fi
    -    export LD_LIBRARY_PATH DYLD_LIBRARY_PATH
    +  if [[ -d "${openssl_install_dir:?}/lib64" ]]; then
    +    LD_LIBRARY_PATH="${openssl_install_dir:?}/lib64:${LD_LIBRARY_PATH:-}"
    +    DYLD_LIBRARY_PATH="${openssl_install_dir:?}/lib64:${DYLD_LIBRARY_PATH:-}"
    +  else
    +    LD_LIBRARY_PATH="${openssl_install_dir:?}/lib:${LD_LIBRARY_PATH:-}"
    +    DYLD_LIBRARY_PATH="${openssl_install_dir:?}/lib:${DYLD_LIBRARY_PATH:-}"
    +  fi
    +  export LD_LIBRARY_PATH DYLD_LIBRARY_PATH
     fi
     
     expect_success() {
    diff --git a/.evergreen/scripts/run-tests.sh b/.evergreen/scripts/run-tests.sh
    index a52e8c89eeb..0d705fe536b 100755
    --- a/.evergreen/scripts/run-tests.sh
    +++ b/.evergreen/scripts/run-tests.sh
    @@ -111,9 +111,9 @@ fi
     # Sanitizer environment variables.
     export ASAN_OPTIONS="detect_leaks=1 abort_on_error=1 symbolize=1"
     export ASAN_SYMBOLIZER_PATH
    -if command -v "/opt/mongodbtoolchain/v4/bin/llvm-symbolizer" > /dev/null; then
    +if command -v "/opt/mongodbtoolchain/v4/bin/llvm-symbolizer" >/dev/null; then
       ASAN_SYMBOLIZER_PATH="/opt/mongodbtoolchain/v4/bin/llvm-symbolizer"
    -elif command -v "/opt/mongodbtoolchain/v3/bin/llvm-symbolizer" > /dev/null; then
    +elif command -v "/opt/mongodbtoolchain/v3/bin/llvm-symbolizer" >/dev/null; then
       ASAN_SYMBOLIZER_PATH="/opt/mongodbtoolchain/v3/bin/llvm-symbolizer"
     fi
     export TSAN_OPTIONS="suppressions=.tsan-suppressions"
    diff --git a/.evergreen/scripts/simple_http_server.py b/.evergreen/scripts/simple_http_server.py
    index b83276afa37..482762d293b 100755
    --- a/.evergreen/scripts/simple_http_server.py
    +++ b/.evergreen/scripts/simple_http_server.py
    @@ -11,20 +11,17 @@ def do_GET(self):
             self.send_response(http.HTTPStatus.OK)
             self.send_header('Content-type', 'text/html')
             self.end_headers()
    -        self.wfile.write(
    -            'Response to GET by simple HTTP server'.encode('utf-8'))
    +        self.wfile.write('Response to GET by simple HTTP server'.encode('utf-8'))
     
         def do_POST(self):
             self.send_response(http.HTTPStatus.OK)
             self.send_header('Content-type', 'text/html')
             self.end_headers()
    -        self.wfile.write(
    -            'Response to POST by simple HTTP server'.encode('utf-8'))
    +        self.wfile.write('Response to POST by simple HTTP server'.encode('utf-8'))
     
     
     def main():
    -    HTTPServer(server_address=('', 18000),
    -               RequestHandlerClass=Simple).serve_forever()
    +    HTTPServer(server_address=('', 18000), RequestHandlerClass=Simple).serve_forever()
     
     
     if __name__ == '__main__':
    diff --git a/build/fake_kms_provider_server.py b/build/fake_kms_provider_server.py
    index abe8779e31e..8b6448a3bbd 100644
    --- a/build/fake_kms_provider_server.py
    +++ b/build/fake_kms_provider_server.py
    @@ -4,6 +4,7 @@
     import time
     import traceback
     from pathlib import Path
    +from typing import TYPE_CHECKING, Any, Callable, Iterable, cast, overload
     
     import bottle
     from bottle import Bottle, HTTPResponse
    @@ -11,44 +12,32 @@
     kms_provider = Bottle(autojson=True)
     """A mock server for Azure IMDS and GCP metadata"""
     
    -from typing import TYPE_CHECKING, Any, Callable, Iterable, cast, overload
    -
     if not TYPE_CHECKING:
         from bottle import request
     else:
         from typing import Protocol
     
         class _RequestParams(Protocol):
    -
    -        def __getitem__(self, key: str) -> str:
    -            ...
    +        def __getitem__(self, key: str) -> str: ...
     
             @overload
    -        def get(self, key: str) -> 'str | None':
    -            ...
    +        def get(self, key: str) -> 'str | None': ...
     
             @overload
    -        def get(self, key: str, default: str) -> str:
    -            ...
    +        def get(self, key: str, default: str) -> str: ...
     
         class _HeadersDict(dict[str, str]):
    -
    -        def raw(self, key: str) -> 'bytes | None':
    -            ...
    +        def raw(self, key: str) -> 'bytes | None': ...
     
         class _Request(Protocol):
    -
             @property
    -        def query(self) -> _RequestParams:
    -            ...
    +        def query(self) -> _RequestParams: ...
     
             @property
    -        def params(self) -> _RequestParams:
    -            ...
    +        def params(self) -> _RequestParams: ...
     
             @property
    -        def headers(self) -> _HeadersDict:
    -            ...
    +        def headers(self) -> _HeadersDict: ...
     
         request = cast('_Request', None)
     
    @@ -59,9 +48,7 @@ def parse_qs(qs: str) -> 'dict[str, str]':
         return dict(bottle._parse_qsl(qs))  # type: ignore
     
     
    -_HandlerFuncT = Callable[
    -    [],
    -    'None|str|bytes|dict[str, Any]|bottle.BaseResponse|Iterable[bytes|str]']
    +_HandlerFuncT = Callable[[], 'None|str|bytes|dict[str, Any]|bottle.BaseResponse|Iterable[bytes|str]']
     
     
     def handle_asserts(fn: _HandlerFuncT) -> _HandlerFuncT:
    @@ -73,9 +60,7 @@ def wrapped():
                 return fn()
             except AssertionError as e:
                 traceback.print_exc()
    -            return bottle.HTTPResponse(status=400,
    -                                       body=json.dumps({'error':
    -                                                        list(e.args)}))
    +            return bottle.HTTPResponse(status=400, body=json.dumps({'error': list(e.args)}))
     
         return wrapped
     
    @@ -83,20 +68,21 @@ def wrapped():
     def test_params() -> 'dict[str, str]':
         return parse_qs(request.headers.get('X-MongoDB-HTTP-TestParams', ''))
     
    +
     @kms_provider.get('/computeMetadata/v1/instance/service-accounts/default/token')
     @handle_asserts
     def get_gcp_token():
    -    metadata_header = request.headers.get("Metadata-Flavor")
    +    metadata_header = request.headers.get('Metadata-Flavor')
         assert metadata_header == 'Google'
     
         case = test_params().get('case')
         print('Case is:', case)
    -    if case == '404': 
    +    if case == '404':
             return HTTPResponse(status=404)
    -    
    +
         if case == 'bad-json':
             return b'{"access-token": }'
    -    
    +
         if case == 'empty-json':
             return b'{}'
     
    @@ -107,11 +93,9 @@ def get_gcp_token():
             return _slow()
     
         assert case in (None, ''), 'Unknown HTTP test case "{}"'.format(case)
    -    
    -    return {
    -        'access_token' : 'google-cookie',
    -        'token_type' : 'Bearer'
    -    }
    +
    +    return {'access_token': 'google-cookie', 'token_type': 'Bearer'}
    +
     
     @kms_provider.get('/metadata/identity/oauth2/token')
     @handle_asserts
    @@ -155,10 +139,12 @@ def _gen_giant() -> Iterable[bytes]:
         "Generate a giant message"
         yield b'{ "item": ['
         for _ in range(1024 * 256):
    -        yield (b'null, null, null, null, null, null, null, null, null, null, '
    -               b'null, null, null, null, null, null, null, null, null, null, '
    -               b'null, null, null, null, null, null, null, null, null, null, '
    -               b'null, null, null, null, null, null, null, null, null, null, ')
    +        yield (
    +            b'null, null, null, null, null, null, null, null, null, null, '
    +            b'null, null, null, null, null, null, null, null, null, null, '
    +            b'null, null, null, null, null, null, null, null, null, null, '
    +            b'null, null, null, null, null, null, null, null, null, null, '
    +        )
         yield b' null ] }'
         yield b'\n'
     
    @@ -174,7 +160,8 @@ def _slow() -> Iterable[bytes]:
     
     if __name__ == '__main__':
         print(
    -        'RECOMMENDED: Run this script using bottle.py (e.g. [{} {}/bottle.py fake_kms_provider_server:kms_provider])'
    -        .format(sys.executable,
    -                Path(__file__).resolve().parent))
    +        'RECOMMENDED: Run this script using bottle.py (e.g. [{} {}/bottle.py fake_kms_provider_server:kms_provider])'.format(
    +            sys.executable, Path(__file__).resolve().parent
    +        )
    +    )
         kms_provider.run()
    diff --git a/build/generate-future-functions.py b/build/generate-future-functions.py
    index 1fa42a5f52d..aee30369fad 100644
    --- a/build/generate-future-functions.py
    +++ b/build/generate-future-functions.py
    @@ -30,477 +30,552 @@
     """
     
     from collections import namedtuple
    -from os.path import basename, dirname, join as joinpath, normpath
    +from os.path import basename, dirname, normpath
    +from os.path import join as joinpath
     
     # Please "pip install jinja2".
     from jinja2 import Environment, FileSystemLoader
     
     this_dir = dirname(__file__)
     template_dir = joinpath(this_dir, 'future_function_templates')
    -mock_server_dir = normpath(
    -    joinpath(this_dir, '../src/libmongoc/tests/mock_server'))
    +mock_server_dir = normpath(joinpath(this_dir, '../src/libmongoc/tests/mock_server'))
     
     # Add additional types here. Use typedefs for derived types so they can
     # be named with one symbol.
    -typedef = namedtuple("typedef", ["name", "typedef"])
    +typedef = namedtuple('typedef', ['name', 'typedef'])
     
     # These are typedef'ed if necessary in future-value.h, and added to the union
     # of possible future_value_t.value types. future_value_t getters and setters
     # are generated for all types, as well as future_t getters.
     typedef_list = [
         # Fundamental.
    -    typedef("bool", None),
    -    typedef("char_ptr", "char *"),
    -    typedef("char_ptr_ptr", "char **"),
    -    typedef("int", None),
    -    typedef("int64_t", None),
    -    typedef("size_t", None),
    -    typedef("ssize_t", None),
    -    typedef("uint32_t", None),
    -    typedef("void_ptr",  "void *"),
    -
    +    typedef('bool', None),
    +    typedef('char_ptr', 'char *'),
    +    typedef('char_ptr_ptr', 'char **'),
    +    typedef('int', None),
    +    typedef('int64_t', None),
    +    typedef('size_t', None),
    +    typedef('ssize_t', None),
    +    typedef('uint32_t', None),
    +    typedef('void_ptr', 'void *'),
         # Const fundamental.
    -    typedef("const_char_ptr", "const char *"),
    -    typedef("bool_ptr", "bool *"),
    -
    +    typedef('const_char_ptr', 'const char *'),
    +    typedef('bool_ptr', 'bool *'),
         # libbson.
    -    typedef("bson_error_ptr", "bson_error_t *"),
    -    typedef("bson_ptr", "bson_t *"),
    -
    +    typedef('bson_error_ptr', 'bson_error_t *'),
    +    typedef('bson_ptr', 'bson_t *'),
         # Const libbson.
    -    typedef("const_bson_ptr", "const bson_t *"),
    -    typedef("const_bson_ptr_ptr", "const bson_t **"),
    -
    +    typedef('const_bson_ptr', 'const bson_t *'),
    +    typedef('const_bson_ptr_ptr', 'const bson_t **'),
         # libmongoc.
    -    typedef("mongoc_async_ptr", "mongoc_async_t *"),
    -    typedef("mongoc_bulk_operation_ptr", "mongoc_bulk_operation_t *"),
    -    typedef("mongoc_client_ptr", "mongoc_client_t *"),
    -    typedef("mongoc_client_pool_ptr", "mongoc_client_pool_t *"),
    -    typedef("mongoc_collection_ptr", "mongoc_collection_t *"),
    -    typedef("mongoc_cluster_ptr", "mongoc_cluster_t *"),
    -    typedef("mongoc_cmd_parts_ptr", "mongoc_cmd_parts_t *"),
    -    typedef("mongoc_cursor_ptr", "mongoc_cursor_t *"),
    -    typedef("mongoc_database_ptr", "mongoc_database_t *"),
    -    typedef("mongoc_gridfs_file_ptr", "mongoc_gridfs_file_t *"),
    -    typedef("mongoc_gridfs_ptr", "mongoc_gridfs_t *"),
    -    typedef("mongoc_insert_flags_t", None),
    -    typedef("mongoc_iovec_ptr", "mongoc_iovec_t *"),
    -    typedef("mongoc_server_stream_ptr", "mongoc_server_stream_t *"),
    -    typedef("mongoc_query_flags_t", None),
    -    typedef("mongoc_server_description_ptr", "mongoc_server_description_t *"),
    -    typedef("mongoc_ss_optype_t", None),
    -    typedef("mongoc_topology_ptr", "mongoc_topology_t *"),
    -    typedef("mongoc_write_concern_ptr", "mongoc_write_concern_t *"),
    -    typedef("mongoc_change_stream_ptr", "mongoc_change_stream_t *"),
    -    typedef("mongoc_remove_flags_t", None),
    -
    +    typedef('mongoc_async_ptr', 'mongoc_async_t *'),
    +    typedef('mongoc_bulk_operation_ptr', 'mongoc_bulk_operation_t *'),
    +    typedef('mongoc_client_ptr', 'mongoc_client_t *'),
    +    typedef('mongoc_client_pool_ptr', 'mongoc_client_pool_t *'),
    +    typedef('mongoc_collection_ptr', 'mongoc_collection_t *'),
    +    typedef('mongoc_cluster_ptr', 'mongoc_cluster_t *'),
    +    typedef('mongoc_cmd_parts_ptr', 'mongoc_cmd_parts_t *'),
    +    typedef('mongoc_cursor_ptr', 'mongoc_cursor_t *'),
    +    typedef('mongoc_database_ptr', 'mongoc_database_t *'),
    +    typedef('mongoc_gridfs_file_ptr', 'mongoc_gridfs_file_t *'),
    +    typedef('mongoc_gridfs_ptr', 'mongoc_gridfs_t *'),
    +    typedef('mongoc_insert_flags_t', None),
    +    typedef('mongoc_iovec_ptr', 'mongoc_iovec_t *'),
    +    typedef('mongoc_server_stream_ptr', 'mongoc_server_stream_t *'),
    +    typedef('mongoc_query_flags_t', None),
    +    typedef('mongoc_server_description_ptr', 'mongoc_server_description_t *'),
    +    typedef('mongoc_ss_optype_t', None),
    +    typedef('mongoc_topology_ptr', 'mongoc_topology_t *'),
    +    typedef('mongoc_write_concern_ptr', 'mongoc_write_concern_t *'),
    +    typedef('mongoc_change_stream_ptr', 'mongoc_change_stream_t *'),
    +    typedef('mongoc_remove_flags_t', None),
         # Const libmongoc.
    -    typedef("const_mongoc_find_and_modify_opts_ptr",
    -            "const mongoc_find_and_modify_opts_t *"),
    -    typedef("const_mongoc_iovec_ptr", "const mongoc_iovec_t *"),
    -    typedef("const_mongoc_read_prefs_ptr", "const mongoc_read_prefs_t *"),
    -    typedef("const_mongoc_write_concern_ptr",
    -            "const mongoc_write_concern_t *"),
    -    typedef("const_mongoc_ss_log_context_ptr",
    -            "const mongoc_ss_log_context_t *"),
    -    typedef("mongoc_index_model_t_ptr_const_ptr", "mongoc_index_model_t *const *")
    +    typedef('const_mongoc_find_and_modify_opts_ptr', 'const mongoc_find_and_modify_opts_t *'),
    +    typedef('const_mongoc_iovec_ptr', 'const mongoc_iovec_t *'),
    +    typedef('const_mongoc_read_prefs_ptr', 'const mongoc_read_prefs_t *'),
    +    typedef('const_mongoc_write_concern_ptr', 'const mongoc_write_concern_t *'),
    +    typedef('const_mongoc_ss_log_context_ptr', 'const mongoc_ss_log_context_t *'),
    +    typedef('mongoc_index_model_t_ptr_const_ptr', 'mongoc_index_model_t *const *'),
     ]
     
     type_list = [T.name for T in typedef_list]
     type_list_with_void = type_list + ['void']
     
    -param = namedtuple("param", ["type_name", "name"])
    -future_function = namedtuple("future_function", ["ret_type", "name", "params"])
    +param = namedtuple('param', ['type_name', 'name'])
    +future_function = namedtuple('future_function', ['ret_type', 'name', 'params'])
     
     # Add additional functions to be tested here. For a name like "cursor_next", we
     # generate two functions: future_cursor_next to prepare the future_t and launch
     # a background thread, and background_cursor_next to run on the thread and
     # resolve the future.
     future_functions = [
    -    future_function("void",
    -                    "mongoc_async_run",
    -                    [param("mongoc_async_ptr", "async")]),
    -
    -    future_function("uint32_t",
    -                    "mongoc_bulk_operation_execute",
    -                    [param("mongoc_bulk_operation_ptr", "bulk"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_database_read_command_with_opts",
    -                    [param("mongoc_database_ptr", "database"),
    -                     param("const_bson_ptr", "command"),
    -                     param("const_mongoc_read_prefs_ptr", "read_prefs"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_database_read_write_command_with_opts",
    -                    [param("mongoc_database_ptr", "database"),
    -                     param("const_bson_ptr", "command"),
    -                     param("const_mongoc_read_prefs_ptr", "read_prefs"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_database_write_command_with_opts",
    -                    [param("mongoc_database_ptr", "database"),
    -                     param("const_bson_ptr", "command"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_client_command_simple",
    -                    [param("mongoc_client_ptr", "client"),
    -                     param("const_char_ptr", "db_name"),
    -                     param("const_bson_ptr", "command"),
    -                     param("const_mongoc_read_prefs_ptr", "read_prefs"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_client_command_with_opts",
    -                    [param("mongoc_client_ptr", "client"),
    -                     param("const_char_ptr", "db_name"),
    -                     param("const_bson_ptr", "command"),
    -                     param("const_mongoc_read_prefs_ptr", "read_prefs"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_client_read_command_with_opts",
    -                    [param("mongoc_client_ptr", "client"),
    -                     param("const_char_ptr", "db_name"),
    -                     param("const_bson_ptr", "command"),
    -                     param("const_mongoc_read_prefs_ptr", "read_prefs"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_client_write_command_with_opts",
    -                    [param("mongoc_client_ptr", "client"),
    -                     param("const_char_ptr", "db_name"),
    -                     param("const_bson_ptr", "command"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_client_read_write_command_with_opts",
    -                    [param("mongoc_client_ptr", "client"),
    -                     param("const_char_ptr", "db_name"),
    -                     param("const_bson_ptr", "command"),
    -                     param("const_mongoc_read_prefs_ptr", "read_prefs"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("mongoc_change_stream_ptr",
    -                    "mongoc_client_watch",
    -                    [param("mongoc_client_ptr", "client"),
    -                     param("const_bson_ptr", "pipeline"),
    -                     param("const_bson_ptr", "opts")]),
    -
    -    future_function("mongoc_cursor_ptr",
    -                    "mongoc_collection_aggregate",
    -                    [param("mongoc_collection_ptr", "collection"),
    -                     param("mongoc_query_flags_t", "flags"),
    -                     param("const_bson_ptr", "pipeline"),
    -                     param("const_bson_ptr", "options"),
    -                     param("const_mongoc_read_prefs_ptr", "read_prefs")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_create_indexes_with_opts",
    -                    [param("mongoc_collection_ptr", "collection"),
    -                     param("mongoc_index_model_t_ptr_const_ptr", "models"),
    -                     param("size_t", "num_models"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_drop_index_with_opts",
    -                    [param("mongoc_collection_ptr", "collection"),
    -                     param("const_char_ptr", "index_name"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_drop_with_opts",
    -                    [param("mongoc_collection_ptr", "collection"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_find_and_modify_with_opts",
    -                    [param("mongoc_collection_ptr", "collection"),
    -                     param("const_bson_ptr", "query"),
    -                     param("const_mongoc_find_and_modify_opts_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_find_and_modify",
    -                    [param("mongoc_collection_ptr", "collection"),
    -                     param("const_bson_ptr", "query"),
    -                     param("const_bson_ptr", "sort"),
    -                     param("const_bson_ptr", "update"),
    -                     param("const_bson_ptr", "fields"),
    -                     param("bool", "_remove"),
    -                     param("bool", "upsert"),
    -                     param("bool", "_new"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("mongoc_cursor_ptr",
    -                    "mongoc_collection_find_indexes_with_opts",
    -                    [param("mongoc_collection_ptr", "collection"),
    -                     param("const_bson_ptr", "opts")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_insert_many",
    -                    [param("mongoc_collection_ptr", "collection"),
    -                     param("const_bson_ptr_ptr", "documents"),
    -                     param("size_t", "n_documents"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_insert_one",
    -                    [param("mongoc_collection_ptr", "collection"),
    -                     param("const_bson_ptr", "document"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_read_command_with_opts",
    -                    [param("mongoc_collection_ptr", "collection"),
    -                     param("const_bson_ptr", "command"),
    -                     param("const_mongoc_read_prefs_ptr", "read_prefs"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_read_write_command_with_opts",
    -                    [param("mongoc_collection_ptr", "collection"),
    -                     param("const_bson_ptr", "command"),
    -                     param("const_mongoc_read_prefs_ptr", "read_prefs"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_write_command_with_opts",
    -                    [param("mongoc_collection_ptr", "collection"),
    -                     param("const_bson_ptr", "command"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_cluster_run_command_parts",
    -                    [param("mongoc_cluster_ptr", "cluster"),
    -                     param("mongoc_server_stream_ptr", "server_stream"),
    -                     param("mongoc_cmd_parts_ptr", "parts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("void",
    -                    "mongoc_cursor_destroy",
    -                    [param("mongoc_cursor_ptr", "cursor")]),
    -
    -    future_function("bool",
    -                    "mongoc_cursor_next",
    -                    [param("mongoc_cursor_ptr", "cursor"),
    -                     param("const_bson_ptr_ptr", "doc")]),
    -
    -    future_function("char_ptr_ptr",
    -                    "mongoc_client_get_database_names_with_opts",
    -                    [param("mongoc_client_ptr", "client"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("mongoc_server_description_ptr",
    -                    "mongoc_client_select_server",
    -                    [param("mongoc_client_ptr", "client"),
    -                     param("bool", "for_writes"),
    -                     param("const_mongoc_read_prefs_ptr", "prefs"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("void",
    -                    "mongoc_client_destroy",
    -                    [param("mongoc_client_ptr", "client")]),
    -
    -    future_function("void",
    -                    "mongoc_client_pool_destroy",
    -                    [param("mongoc_client_pool_ptr", "pool")]),
    -
    -    future_function("bool",
    -                    "mongoc_database_command_simple",
    -                    [param("mongoc_database_ptr", "database"),
    -                     param("bson_ptr", "command"),
    -                     param("const_mongoc_read_prefs_ptr", "read_prefs"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_database_drop_with_opts",
    -                    [param("mongoc_database_ptr", "database"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("char_ptr_ptr",
    -                    "mongoc_database_get_collection_names_with_opts",
    -                    [param("mongoc_database_ptr", "database"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("mongoc_change_stream_ptr",
    -                    "mongoc_database_watch",
    -                    [param("mongoc_database_ptr", "database"),
    -                     param("const_bson_ptr", "pipeline"),
    -                     param("const_bson_ptr", "opts")]),
    -
    -    future_function("ssize_t",
    -                    "mongoc_gridfs_file_readv",
    -                    [param("mongoc_gridfs_file_ptr", "file"),
    -                     param("mongoc_iovec_ptr", "iov"),
    -                     param("size_t", "iovcnt"),
    -                     param("size_t", "min_bytes"),
    -                     param("uint32_t", "timeout_msec")]),
    -
    -    future_function("bool",
    -                    "mongoc_gridfs_file_remove",
    -                    [param("mongoc_gridfs_file_ptr", "file"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("int",
    -                    "mongoc_gridfs_file_seek",
    -                    [param("mongoc_gridfs_file_ptr", "file"),
    -                     param("int64_t", "delta"),
    -                     param("int", "whence")]),
    -
    -    future_function("ssize_t",
    -                    "mongoc_gridfs_file_writev",
    -                    [param("mongoc_gridfs_file_ptr", "file"),
    -                     param("const_mongoc_iovec_ptr", "iov"),
    -                     param("size_t", "iovcnt"),
    -                     param("uint32_t", "timeout_msec")]),
    -
    -    future_function("mongoc_gridfs_file_ptr",
    -                    "mongoc_gridfs_find_one_with_opts",
    -                    [param("mongoc_gridfs_ptr", "gridfs"),
    -                     param("const_bson_ptr", "filter"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("mongoc_server_description_ptr",
    -                    "mongoc_topology_select",
    -                    [param("mongoc_topology_ptr", "topology"),
    -                     param("mongoc_ss_optype_t", "optype"),
    -                     param("const_mongoc_ss_log_context_ptr", "log_context"),
    -                     param("const_mongoc_read_prefs_ptr", "read_prefs"),
    -                     param("bool_ptr", "must_use_primary"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("mongoc_gridfs_ptr",
    -                    "mongoc_client_get_gridfs",
    -                    [param("mongoc_client_ptr", "client"),
    -                     param("const_char_ptr", "db"),
    -                     param("const_char_ptr", "prefix"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("mongoc_change_stream_ptr",
    -                    "mongoc_collection_watch",
    -                    [param("mongoc_collection_ptr", "coll"),
    -                     param("const_bson_ptr", "pipeline"),
    -                     param("const_bson_ptr", "opts")]),
    -
    -    future_function("bool",
    -                    "mongoc_change_stream_next",
    -                    [param("mongoc_change_stream_ptr", "stream"),
    -                     param("const_bson_ptr_ptr", "bson")]),
    -
    -    future_function("void",
    -                    "mongoc_change_stream_destroy",
    -                    [param("mongoc_change_stream_ptr", "stream")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_delete_one",
    -                    [param("mongoc_collection_ptr", "coll"),
    -                     param("const_bson_ptr", "selector"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_delete_many",
    -                    [param("mongoc_collection_ptr", "coll"),
    -                     param("const_bson_ptr", "selector"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_remove",
    -                    [param("mongoc_collection_ptr", "coll"),
    -                     param("mongoc_remove_flags_t", "flags"),
    -                     param("const_bson_ptr", "selector"),
    -                     param("const_mongoc_write_concern_ptr", "write_concern"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_update_one",
    -                    [param("mongoc_collection_ptr", "coll"),
    -                     param("const_bson_ptr", "selector"),
    -                     param("const_bson_ptr", "update"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_update_many",
    -                    [param("mongoc_collection_ptr", "coll"),
    -                     param("const_bson_ptr", "selector"),
    -                     param("const_bson_ptr", "update"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("bool",
    -                    "mongoc_collection_replace_one",
    -                    [param("mongoc_collection_ptr", "coll"),
    -                     param("const_bson_ptr", "selector"),
    -                     param("const_bson_ptr", "replacement"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("int64_t",
    -                    "mongoc_collection_count_documents",
    -                    [param("mongoc_collection_ptr", "coll"),
    -                     param("const_bson_ptr", "filter"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("const_mongoc_read_prefs_ptr", "read_prefs"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    -
    -    future_function("int64_t",
    -                    "mongoc_collection_estimated_document_count",
    -                    [param("mongoc_collection_ptr", "coll"),
    -                     param("const_bson_ptr", "opts"),
    -                     param("const_mongoc_read_prefs_ptr", "read_prefs"),
    -                     param("bson_ptr", "reply"),
    -                     param("bson_error_ptr", "error")]),
    +    future_function('void', 'mongoc_async_run', [param('mongoc_async_ptr', 'async')]),
    +    future_function(
    +        'uint32_t',
    +        'mongoc_bulk_operation_execute',
    +        [param('mongoc_bulk_operation_ptr', 'bulk'), param('bson_ptr', 'reply'), param('bson_error_ptr', 'error')],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_database_read_command_with_opts',
    +        [
    +            param('mongoc_database_ptr', 'database'),
    +            param('const_bson_ptr', 'command'),
    +            param('const_mongoc_read_prefs_ptr', 'read_prefs'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_database_read_write_command_with_opts',
    +        [
    +            param('mongoc_database_ptr', 'database'),
    +            param('const_bson_ptr', 'command'),
    +            param('const_mongoc_read_prefs_ptr', 'read_prefs'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_database_write_command_with_opts',
    +        [
    +            param('mongoc_database_ptr', 'database'),
    +            param('const_bson_ptr', 'command'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_client_command_simple',
    +        [
    +            param('mongoc_client_ptr', 'client'),
    +            param('const_char_ptr', 'db_name'),
    +            param('const_bson_ptr', 'command'),
    +            param('const_mongoc_read_prefs_ptr', 'read_prefs'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_client_command_with_opts',
    +        [
    +            param('mongoc_client_ptr', 'client'),
    +            param('const_char_ptr', 'db_name'),
    +            param('const_bson_ptr', 'command'),
    +            param('const_mongoc_read_prefs_ptr', 'read_prefs'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_client_read_command_with_opts',
    +        [
    +            param('mongoc_client_ptr', 'client'),
    +            param('const_char_ptr', 'db_name'),
    +            param('const_bson_ptr', 'command'),
    +            param('const_mongoc_read_prefs_ptr', 'read_prefs'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_client_write_command_with_opts',
    +        [
    +            param('mongoc_client_ptr', 'client'),
    +            param('const_char_ptr', 'db_name'),
    +            param('const_bson_ptr', 'command'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_client_read_write_command_with_opts',
    +        [
    +            param('mongoc_client_ptr', 'client'),
    +            param('const_char_ptr', 'db_name'),
    +            param('const_bson_ptr', 'command'),
    +            param('const_mongoc_read_prefs_ptr', 'read_prefs'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'mongoc_change_stream_ptr',
    +        'mongoc_client_watch',
    +        [param('mongoc_client_ptr', 'client'), param('const_bson_ptr', 'pipeline'), param('const_bson_ptr', 'opts')],
    +    ),
    +    future_function(
    +        'mongoc_cursor_ptr',
    +        'mongoc_collection_aggregate',
    +        [
    +            param('mongoc_collection_ptr', 'collection'),
    +            param('mongoc_query_flags_t', 'flags'),
    +            param('const_bson_ptr', 'pipeline'),
    +            param('const_bson_ptr', 'options'),
    +            param('const_mongoc_read_prefs_ptr', 'read_prefs'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_create_indexes_with_opts',
    +        [
    +            param('mongoc_collection_ptr', 'collection'),
    +            param('mongoc_index_model_t_ptr_const_ptr', 'models'),
    +            param('size_t', 'num_models'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_drop_index_with_opts',
    +        [
    +            param('mongoc_collection_ptr', 'collection'),
    +            param('const_char_ptr', 'index_name'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_drop_with_opts',
    +        [
    +            param('mongoc_collection_ptr', 'collection'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_find_and_modify_with_opts',
    +        [
    +            param('mongoc_collection_ptr', 'collection'),
    +            param('const_bson_ptr', 'query'),
    +            param('const_mongoc_find_and_modify_opts_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_find_and_modify',
    +        [
    +            param('mongoc_collection_ptr', 'collection'),
    +            param('const_bson_ptr', 'query'),
    +            param('const_bson_ptr', 'sort'),
    +            param('const_bson_ptr', 'update'),
    +            param('const_bson_ptr', 'fields'),
    +            param('bool', '_remove'),
    +            param('bool', 'upsert'),
    +            param('bool', '_new'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'mongoc_cursor_ptr',
    +        'mongoc_collection_find_indexes_with_opts',
    +        [param('mongoc_collection_ptr', 'collection'), param('const_bson_ptr', 'opts')],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_insert_many',
    +        [
    +            param('mongoc_collection_ptr', 'collection'),
    +            param('const_bson_ptr_ptr', 'documents'),
    +            param('size_t', 'n_documents'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_insert_one',
    +        [
    +            param('mongoc_collection_ptr', 'collection'),
    +            param('const_bson_ptr', 'document'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_read_command_with_opts',
    +        [
    +            param('mongoc_collection_ptr', 'collection'),
    +            param('const_bson_ptr', 'command'),
    +            param('const_mongoc_read_prefs_ptr', 'read_prefs'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_read_write_command_with_opts',
    +        [
    +            param('mongoc_collection_ptr', 'collection'),
    +            param('const_bson_ptr', 'command'),
    +            param('const_mongoc_read_prefs_ptr', 'read_prefs'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_write_command_with_opts',
    +        [
    +            param('mongoc_collection_ptr', 'collection'),
    +            param('const_bson_ptr', 'command'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_cluster_run_command_parts',
    +        [
    +            param('mongoc_cluster_ptr', 'cluster'),
    +            param('mongoc_server_stream_ptr', 'server_stream'),
    +            param('mongoc_cmd_parts_ptr', 'parts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function('void', 'mongoc_cursor_destroy', [param('mongoc_cursor_ptr', 'cursor')]),
    +    future_function(
    +        'bool', 'mongoc_cursor_next', [param('mongoc_cursor_ptr', 'cursor'), param('const_bson_ptr_ptr', 'doc')]
    +    ),
    +    future_function(
    +        'char_ptr_ptr',
    +        'mongoc_client_get_database_names_with_opts',
    +        [param('mongoc_client_ptr', 'client'), param('const_bson_ptr', 'opts'), param('bson_error_ptr', 'error')],
    +    ),
    +    future_function(
    +        'mongoc_server_description_ptr',
    +        'mongoc_client_select_server',
    +        [
    +            param('mongoc_client_ptr', 'client'),
    +            param('bool', 'for_writes'),
    +            param('const_mongoc_read_prefs_ptr', 'prefs'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function('void', 'mongoc_client_destroy', [param('mongoc_client_ptr', 'client')]),
    +    future_function('void', 'mongoc_client_pool_destroy', [param('mongoc_client_pool_ptr', 'pool')]),
    +    future_function(
    +        'bool',
    +        'mongoc_database_command_simple',
    +        [
    +            param('mongoc_database_ptr', 'database'),
    +            param('bson_ptr', 'command'),
    +            param('const_mongoc_read_prefs_ptr', 'read_prefs'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_database_drop_with_opts',
    +        [param('mongoc_database_ptr', 'database'), param('const_bson_ptr', 'opts'), param('bson_error_ptr', 'error')],
    +    ),
    +    future_function(
    +        'char_ptr_ptr',
    +        'mongoc_database_get_collection_names_with_opts',
    +        [param('mongoc_database_ptr', 'database'), param('const_bson_ptr', 'opts'), param('bson_error_ptr', 'error')],
    +    ),
    +    future_function(
    +        'mongoc_change_stream_ptr',
    +        'mongoc_database_watch',
    +        [
    +            param('mongoc_database_ptr', 'database'),
    +            param('const_bson_ptr', 'pipeline'),
    +            param('const_bson_ptr', 'opts'),
    +        ],
    +    ),
    +    future_function(
    +        'ssize_t',
    +        'mongoc_gridfs_file_readv',
    +        [
    +            param('mongoc_gridfs_file_ptr', 'file'),
    +            param('mongoc_iovec_ptr', 'iov'),
    +            param('size_t', 'iovcnt'),
    +            param('size_t', 'min_bytes'),
    +            param('uint32_t', 'timeout_msec'),
    +        ],
    +    ),
    +    future_function(
    +        'bool', 'mongoc_gridfs_file_remove', [param('mongoc_gridfs_file_ptr', 'file'), param('bson_error_ptr', 'error')]
    +    ),
    +    future_function(
    +        'int',
    +        'mongoc_gridfs_file_seek',
    +        [param('mongoc_gridfs_file_ptr', 'file'), param('int64_t', 'delta'), param('int', 'whence')],
    +    ),
    +    future_function(
    +        'ssize_t',
    +        'mongoc_gridfs_file_writev',
    +        [
    +            param('mongoc_gridfs_file_ptr', 'file'),
    +            param('const_mongoc_iovec_ptr', 'iov'),
    +            param('size_t', 'iovcnt'),
    +            param('uint32_t', 'timeout_msec'),
    +        ],
    +    ),
    +    future_function(
    +        'mongoc_gridfs_file_ptr',
    +        'mongoc_gridfs_find_one_with_opts',
    +        [
    +            param('mongoc_gridfs_ptr', 'gridfs'),
    +            param('const_bson_ptr', 'filter'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'mongoc_server_description_ptr',
    +        'mongoc_topology_select',
    +        [
    +            param('mongoc_topology_ptr', 'topology'),
    +            param('mongoc_ss_optype_t', 'optype'),
    +            param('const_mongoc_ss_log_context_ptr', 'log_context'),
    +            param('const_mongoc_read_prefs_ptr', 'read_prefs'),
    +            param('bool_ptr', 'must_use_primary'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'mongoc_gridfs_ptr',
    +        'mongoc_client_get_gridfs',
    +        [
    +            param('mongoc_client_ptr', 'client'),
    +            param('const_char_ptr', 'db'),
    +            param('const_char_ptr', 'prefix'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'mongoc_change_stream_ptr',
    +        'mongoc_collection_watch',
    +        [param('mongoc_collection_ptr', 'coll'), param('const_bson_ptr', 'pipeline'), param('const_bson_ptr', 'opts')],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_change_stream_next',
    +        [param('mongoc_change_stream_ptr', 'stream'), param('const_bson_ptr_ptr', 'bson')],
    +    ),
    +    future_function('void', 'mongoc_change_stream_destroy', [param('mongoc_change_stream_ptr', 'stream')]),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_delete_one',
    +        [
    +            param('mongoc_collection_ptr', 'coll'),
    +            param('const_bson_ptr', 'selector'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_delete_many',
    +        [
    +            param('mongoc_collection_ptr', 'coll'),
    +            param('const_bson_ptr', 'selector'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_remove',
    +        [
    +            param('mongoc_collection_ptr', 'coll'),
    +            param('mongoc_remove_flags_t', 'flags'),
    +            param('const_bson_ptr', 'selector'),
    +            param('const_mongoc_write_concern_ptr', 'write_concern'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_update_one',
    +        [
    +            param('mongoc_collection_ptr', 'coll'),
    +            param('const_bson_ptr', 'selector'),
    +            param('const_bson_ptr', 'update'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_update_many',
    +        [
    +            param('mongoc_collection_ptr', 'coll'),
    +            param('const_bson_ptr', 'selector'),
    +            param('const_bson_ptr', 'update'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'bool',
    +        'mongoc_collection_replace_one',
    +        [
    +            param('mongoc_collection_ptr', 'coll'),
    +            param('const_bson_ptr', 'selector'),
    +            param('const_bson_ptr', 'replacement'),
    +            param('const_bson_ptr', 'opts'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'int64_t',
    +        'mongoc_collection_count_documents',
    +        [
    +            param('mongoc_collection_ptr', 'coll'),
    +            param('const_bson_ptr', 'filter'),
    +            param('const_bson_ptr', 'opts'),
    +            param('const_mongoc_read_prefs_ptr', 'read_prefs'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
    +    future_function(
    +        'int64_t',
    +        'mongoc_collection_estimated_document_count',
    +        [
    +            param('mongoc_collection_ptr', 'coll'),
    +            param('const_bson_ptr', 'opts'),
    +            param('const_mongoc_read_prefs_ptr', 'read_prefs'),
    +            param('bson_ptr', 'reply'),
    +            param('bson_error_ptr', 'error'),
    +        ],
    +    ),
     ]
     
     for fn in future_functions:
    @@ -523,7 +598,7 @@
     def future_function_name(fn):
         if fn.name.startswith('mongoc'):
             # E.g. future_cursor_next().
    -        return 'future' + fn.name[len('mongoc'):]
    +        return 'future' + fn.name[len('mongoc') :]
         else:
             # E.g. future_mongoc_client_command_simple().
             return 'future_' + fn.name
    @@ -532,16 +607,18 @@ def future_function_name(fn):
     env = Environment(loader=FileSystemLoader(template_dir))
     env.filters['future_function_name'] = future_function_name
     
    -files = ["future.h",
    -         "future.c",
    -         "future-value.h",
    -         "future-value.c",
    -         "future-functions.h",
    -         "future-functions.c"]
    +files = [
    +    'future.h',
    +    'future.c',
    +    'future-value.h',
    +    'future-value.c',
    +    'future-functions.h',
    +    'future-functions.c',
    +]
     
     for file_name in files:
         print(file_name)
         with open(joinpath(mock_server_dir, file_name), 'w+') as f:
    -        t = env.get_template(file_name + ".template")
    +        t = env.get_template(file_name + '.template')
             f.write(t.render(globals()))
             f.write('\n')
    diff --git a/build/generate-opts.py b/build/generate-opts.py
    index 71736433aef..9f9b24c9986 100644
    --- a/build/generate-opts.py
    +++ b/build/generate-opts.py
    @@ -25,9 +25,10 @@
     """
     
     # yapf: disable
    -from collections import OrderedDict
    -from os.path import basename, dirname, join as joinpath, normpath
     import re
    +from collections import OrderedDict
    +from os.path import basename, dirname, normpath
    +from os.path import join as joinpath
     
     from jinja2 import Environment, FileSystemLoader  # Please "pip install jinja2".
     
    diff --git a/build/proc-ctl.py b/build/proc-ctl.py
    index 9446bf2892f..373a93d39ac 100644
    --- a/build/proc-ctl.py
    +++ b/build/proc-ctl.py
    @@ -16,48 +16,30 @@
     from typing import TYPE_CHECKING, NoReturn, Sequence, Union, cast
     
     if TYPE_CHECKING:
    -    from typing import (Literal, NamedTuple, TypedDict)
    +    from typing import Literal, NamedTuple, TypedDict
     
     INTERUPT_SIGNAL = signal.SIGINT if os.name != 'nt' else signal.CTRL_C_SIGNAL
     
     
     def create_parser() -> argparse.ArgumentParser:
         parser = argparse.ArgumentParser('proc-ctl')
    -    grp = parser.add_subparsers(title='Commands',
    -                                dest='command',
    -                                metavar='')
    +    grp = parser.add_subparsers(title='Commands', dest='command', metavar='')
     
         start = grp.add_parser('start', help='Start a new subprocess')
    -    start.add_argument('--ctl-dir',
    -                       help='The control directory for the subprocess',
    -                       required=True,
    -                       type=Path)
    -    start.add_argument('--cwd',
    -                       help='The new subdirectory of the spawned process',
    -                       type=Path)
    -    start.add_argument(
    -        '--spawn-wait',
    -        help='Number of seconds to wait for child to be running',
    -        type=float,
    -        default=3)
    -    start.add_argument('child_command',
    -                       nargs='+',
    -                       help='The command to execute',
    -                       metavar='<command> [args...]')
    +    start.add_argument('--ctl-dir', help='The control directory for the subprocess', required=True, type=Path)
    +    start.add_argument('--cwd', help='The new subdirectory of the spawned process', type=Path)
    +    start.add_argument('--spawn-wait', help='Number of seconds to wait for child to be running', type=float, default=3)
    +    start.add_argument('child_command', nargs='+', help='The command to execute', metavar='<command> [args...]')
     
         stop = grp.add_parser('stop', help='Stop a running subprocess')
    -    stop.add_argument('--ctl-dir',
    -                      help='The control directory for the subprocess',
    -                      required=True,
    -                      type=Path)
    -    stop.add_argument('--stop-wait',
    -                      help='Number of seconds to wait for stopping',
    -                      type=float,
    -                      default=5)
    -    stop.add_argument('--if-not-running',
    -                      help='Action to take if the child is not running',
    -                      choices=['fail', 'ignore'],
    -                      default='fail')
    +    stop.add_argument('--ctl-dir', help='The control directory for the subprocess', required=True, type=Path)
    +    stop.add_argument('--stop-wait', help='Number of seconds to wait for stopping', type=float, default=5)
    +    stop.add_argument(
    +        '--if-not-running',
    +        help='Action to take if the child is not running',
    +        choices=['fail', 'ignore'],
    +        default='fail',
    +    )
     
         ll_run = grp.add_parser('__run')
         ll_run.add_argument('--ctl-dir', type=Path, required=True)
    @@ -67,33 +49,39 @@ def create_parser() -> argparse.ArgumentParser:
     
     
     if TYPE_CHECKING:
    -    StartCommandArgs = NamedTuple('StartCommandArgs', [
    -        ('command', Literal['start']),
    -        ('ctl_dir', Path),
    -        ('cwd', Path),
    -        ('child_command', Sequence[str]),
    -        ('spawn_wait', float),
    -    ])
    -
    -    StopCommandArgs = NamedTuple('StopCommandArgs', [
    -        ('command', Literal['stop']),
    -        ('ctl_dir', Path),
    -        ('stop_wait', float),
    -        ('if_not_running', Literal['fail', 'ignore']),
    -    ])
    -
    -    _RunCommandArgs = NamedTuple('_RunCommandArgs', [
    -        ('command', Literal['__run']),
    -        ('child_command', Sequence[str]),
    -        ('ctl_dir', Path),
    -    ])
    +    StartCommandArgs = NamedTuple(
    +        'StartCommandArgs',
    +        [
    +            ('command', Literal['start']),
    +            ('ctl_dir', Path),
    +            ('cwd', Path),
    +            ('child_command', Sequence[str]),
    +            ('spawn_wait', float),
    +        ],
    +    )
    +
    +    StopCommandArgs = NamedTuple(
    +        'StopCommandArgs',
    +        [
    +            ('command', Literal['stop']),
    +            ('ctl_dir', Path),
    +            ('stop_wait', float),
    +            ('if_not_running', Literal['fail', 'ignore']),
    +        ],
    +    )
    +
    +    _RunCommandArgs = NamedTuple(
    +        '_RunCommandArgs',
    +        [
    +            ('command', Literal['__run']),
    +            ('child_command', Sequence[str]),
    +            ('ctl_dir', Path),
    +        ],
    +    )
     
         CommandArgs = Union[StartCommandArgs, StopCommandArgs, _RunCommandArgs]
     
    -    _ResultType = TypedDict('_ResultType', {
    -        'exit': 'str | int | None',
    -        'error': 'str | None'
    -    })
    +    _ResultType = TypedDict('_ResultType', {'exit': 'str | int | None', 'error': 'str | None'})
     
     
     def parse_argv(argv: 'Sequence[str]') -> 'CommandArgs':
    @@ -103,7 +91,6 @@ def parse_argv(argv: 'Sequence[str]') -> 'CommandArgs':
     
     
     class _ChildControl:
    -
         def __init__(self, ctl_dir: Path) -> None:
             self._ctl_dir = ctl_dir
     
    @@ -128,10 +115,7 @@ def get_pid(self) -> 'int | None':
             return int(txt)
     
         def set_exit(self, exit: 'str | int | None', error: 'str | None') -> None:
    -        write_text(self.result_file, json.dumps({
    -            'exit': exit,
    -            'error': error
    -        }))
    +        write_text(self.result_file, json.dumps({'exit': exit, 'error': error}))
             remove_file(self.pid_file)
     
         def get_result(self) -> 'None | _ResultType':
    @@ -159,8 +143,7 @@ def _start(args: 'StartCommandArgs') -> int:
         args.ctl_dir.mkdir(exist_ok=True, parents=True)
         child = _ChildControl(args.ctl_dir)
         if child.get_pid() is not None:
    -        raise RuntimeError('Child process is already running [PID {}]'.format(
    -            child.get_pid()))
    +        raise RuntimeError('Child process is already running [PID {}]'.format(child.get_pid()))
         child.clear_result()
         # Spawn the child controller
         subprocess.Popen(
    @@ -168,7 +151,8 @@ def _start(args: 'StartCommandArgs') -> int:
             cwd=args.cwd,
             stderr=subprocess.STDOUT,
             stdout=args.ctl_dir.joinpath('runner-output.txt').open('wb'),
    -        stdin=subprocess.DEVNULL)
    +        stdin=subprocess.DEVNULL,
    +    )
         expire = datetime.now() + timedelta(seconds=args.spawn_wait)
         # Wait for the PID to appear
         while child.get_pid() is None and child.get_result() is None:
    @@ -182,8 +166,7 @@ def _start(args: 'StartCommandArgs') -> int:
                 raise RuntimeError('Failed to spawn child runner?')
             if result['error']:
                 print(result['error'], file=sys.stderr)
    -        raise RuntimeError('Child exited immediately [Exited {}]'.format(
    -            result['exit']))
    +        raise RuntimeError('Child exited immediately [Exited {}]'.format(result['exit']))
         # Wait to see that it is still running after --spawn-wait seconds
         while child.get_result() is None:
             if expire < datetime.now():
    @@ -194,8 +177,7 @@ def _start(args: 'StartCommandArgs') -> int:
         if result is not None:
             if result['error']:
                 print(result['error'], file=sys.stderr)
    -        raise RuntimeError('Child exited prematurely [Exited {}]'.format(
    -            result['exit']))
    +        raise RuntimeError('Child exited prematurely [Exited {}]'.format(result['exit']))
         return 0
     
     
    @@ -216,8 +198,7 @@ def _stop(args: 'StopCommandArgs') -> int:
             time.sleep(0.1)
         result = child.get_result()
         if result is None:
    -        raise RuntimeError(
    -            'Child process did not exit within the grace period')
    +        raise RuntimeError('Child process did not exit within the grace period')
         return 0
     
     
    @@ -228,7 +209,8 @@ def __run(args: '_RunCommandArgs') -> int:
                 args.child_command,
                 stdout=args.ctl_dir.joinpath('child-output.txt').open('wb'),
                 stderr=subprocess.STDOUT,
    -            stdin=subprocess.DEVNULL)
    +            stdin=subprocess.DEVNULL,
    +        )
         except:
             this.set_exit('spawn-failed', traceback.format_exc())
             raise
    @@ -272,8 +254,7 @@ def remove_file(fpath: Path):
         then delete that file. This ensures the file is "out of the way", even if
         it takes some time to delete.
         """
    -    delname = fpath.with_name(fpath.name + '.delete-' +
    -                              str(random.randint(0, 999999)))
    +    delname = fpath.with_name(fpath.name + '.delete-' + str(random.randint(0, 999999)))
         try:
             fpath.rename(delname)
         except FileNotFoundError:
    diff --git a/build/sphinx/homepage-config/conf.py b/build/sphinx/homepage-config/conf.py
    index f1fae6c5e7b..75415195717 100644
    --- a/build/sphinx/homepage-config/conf.py
    +++ b/build/sphinx/homepage-config/conf.py
    @@ -1,13 +1,14 @@
     # -*- coding: utf-8 -*-
    -from docutils import nodes
     import os
     import sys
     
    +from docutils import nodes
    +
     # Import common docs config.
     this_path = os.path.dirname(__file__)
     sys.path.append(os.path.normpath(os.path.join(this_path, '../')))
     
    -from mongoc_common import *
    +from mongoc_common import *  # noqa: E402, F403
     
     with open(this_path + '/../../../VERSION_CURRENT') as vc:
         release = version = vc.read()
    @@ -18,9 +19,9 @@
     master_doc = 'index'
     
     # General information about the project.
    -project = u'mongoc.org'
    -copyright = u'2009-present, MongoDB, Inc.'
    -author = u'MongoDB, Inc'
    +project = 'mongoc.org'
    +copyright = '2009-present, MongoDB, Inc.'
    +author = 'MongoDB, Inc'
     exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
     
     # The name of the Pygments (syntax highlighting) style to use.
    @@ -32,25 +33,24 @@
     
     # Support :download-link:`bson` or :download-link:`mongoc`.
     def download_link(typ, rawtext, text, lineno, inliner, options={}, content=[]):
    -    if text == "mongoc":
    -        lib = "mongo-c-driver"
    +    if text == 'mongoc':
    +        lib = 'mongo-c-driver'
         else:
    -        raise ValueError(
    -            "download link must be mongoc, not \"%s\"" % text)
    +        raise ValueError('download link must be mongoc, not "%s"' % text)
     
    -    title = "%s-%s" % (lib, version)
    -    url = ("https://github.com/mongodb/mongo-c-driver/releases/tag/%(version)s") % {
    -              "version": version
    -          }
    +    title = '%s-%s' % (lib, version)
    +    url = ('https://github.com/mongodb/mongo-c-driver/releases/tag/%(version)s') % {'version': version}
     
         pnode = nodes.reference(title, title, internal=False, refuri=url)
         return [pnode], []
     
    +
     def setup(app):
    -    mongoc_common_setup(app)
    +    mongoc_common_setup(app)  # noqa: F405
     
         app.add_role('download-link', download_link)
     
    +
     # -- Options for HTML output ----------------------------------------------
     
     html_theme = 'furo'
    @@ -59,15 +59,13 @@ def setup(app):
     html_use_smartypants = False
     html_show_sourcelink = False
     html_use_index = False
    -rst_prolog = rf"""
    +rst_prolog = r"""
     
     .. _mongodb_docs_cdriver: https://www.mongodb.com/docs/languages/c/c-driver/current/
     
     """
     
    -html_sidebars = {
    -    '**': []
    -}
    +html_sidebars = {'**': []}
     
     # Note: http://www.sphinx-doc.org/en/1.5.1/config.html#confval-html_copy_source
     # This will degrade the Javascript quicksearch if we ever use it.
    diff --git a/build/sphinx/mongoc/__init__.py b/build/sphinx/mongoc/__init__.py
    index ea5c2861dfd..bf840f32c8c 100644
    --- a/build/sphinx/mongoc/__init__.py
    +++ b/build/sphinx/mongoc/__init__.py
    @@ -1,8 +1,7 @@
    -from docutils.nodes import literal, Text
    +from docutils.nodes import Text, literal
     from docutils.parsers.rst import roles
    -
    -from sphinx.roles import XRefRole
     from sphinx import version_info as sphinx_version_info
    +from sphinx.roles import XRefRole
     
     
     class SymbolRole(XRefRole):
    @@ -42,8 +41,7 @@ def __call__(self, *args, **kwargs):
     
     
     def setup(app):
    -    roles.register_local_role(
    -        'symbol', SymbolRole(warn_dangling=True, innernodeclass=literal))
    +    roles.register_local_role('symbol', SymbolRole(warn_dangling=True, innernodeclass=literal))
     
         return {
             'version': '1.0',
    diff --git a/build/sphinx/mongoc_common.py b/build/sphinx/mongoc_common.py
    index 32a26074378..4247d649083 100644
    --- a/build/sphinx/mongoc_common.py
    +++ b/build/sphinx/mongoc_common.py
    @@ -1,11 +1,10 @@
     import os
     import re
     from pathlib import Path
    -from typing import Any, Iterable, Sequence, Union, List, Tuple, Dict
    +from typing import Any, Dict, Iterable, List, Sequence, Tuple, Union
     
     from docutils import nodes
     from docutils.nodes import Node, document
    -
     from sphinx.application import Sphinx
     from sphinx.application import logger as sphinx_log
     
    @@ -14,12 +13,11 @@
     except ImportError:
         # Try importing from older Sphinx version path.
         from sphinx.builders.html import DirectoryHTMLBuilder
    -from sphinx.config import Config
     from docutils.parsers.rst import Directive
     
     # Do not require newer sphinx. EPEL packages build man pages with Sphinx 1.7.6. Refer: CDRIVER-4767
    -needs_sphinx = "1.7"
    -author = "MongoDB, Inc"
    +needs_sphinx = '1.7'
    +author = 'MongoDB, Inc'
     
     # -- Options for HTML output ----------------------------------------------
     
    @@ -35,7 +33,7 @@ def _file_man_page_name(fpath: Path) -> Union[str, None]:
         "Given an rST file input, find the :man_page: frontmatter value, if present"
         lines = fpath.read_text().splitlines()
         for line in lines:
    -        mat = re.match(r":man_page:\s+(.+)", line)
    +        mat = re.match(r':man_page:\s+(.+)', line)
             if not mat:
                 continue
             return mat[1]
    @@ -45,11 +43,11 @@ def _collect_man(app: Sphinx):
         # Note: 'app' is partially-formed, as this is called from the Sphinx.__init__
         docdir = Path(app.srcdir)
         # Find everything:
    -    children = docdir.rglob("*")
    +    children = docdir.rglob('*')
         # Find only regular files:
         files = filter(Path.is_file, children)
         # Find files that have a .rst extension:
    -    rst_files = (f for f in files if f.suffix == ".rst")
    +    rst_files = (f for f in files if f.suffix == '.rst')
         # Pair each file with its :man_page: frontmatter, if present:
         with_man_name = ((f, _file_man_page_name(f)) for f in rst_files)
         # Filter out pages that do not have a :man_page: item:s
    @@ -62,7 +60,7 @@ def _collect_man(app: Sphinx):
             docname = str(relative_path.parent / filepath.stem)
     
             assert docname, filepath
    -        man_pages.append((docname, man_name, "", [author], 3))
    +        man_pages.append((docname, man_name, '', [author], 3))
     
     
     # -- Options for manual page output ---------------------------------------
    @@ -82,8 +80,8 @@ def add_ga_javascript(app: Sphinx, pagename: str, templatename: str, context: Di
             return
     
         # Add google analytics.
    -    context["metatags"] = (
    -        context.get("metatags", "")
    +    context['metatags'] = (
    +        context.get('metatags', '')
             + """
     
     
    @@ -105,34 +103,34 @@ class VersionList(Directive):
         has_content = True
     
         def run(self) -> Sequence[Node]:
    -        if self.content[0] != "libmongoc" and self.content[0] != "libbson":
    -            print("versionlist must be libmongoc or libbson")
    +        if self.content[0] != 'libmongoc' and self.content[0] != 'libbson':
    +            print('versionlist must be libmongoc or libbson')
                 return []
     
             libname = self.content[0]
    -        env_name = libname.upper() + "_VERSION_LIST"
    +        env_name = libname.upper() + '_VERSION_LIST'
             if env_name not in os.environ:
    -            print(env_name + " not set, not generating version list")
    +            print(env_name + ' not set, not generating version list')
                 return []
     
    -        versions = os.environ[env_name].split(",")
    +        versions = os.environ[env_name].split(',')
     
    -        header = nodes.paragraph("", "")
    -        p = nodes.paragraph("", "")
    -        uri = "https://www.mongoc.org/%s/%s/index.html" % (libname, versions[0])
    -        p += nodes.reference("", "Latest Release (%s)" % versions[0], internal=False, refuri=uri)
    +        header = nodes.paragraph('', '')
    +        p = nodes.paragraph('', '')
    +        uri = 'https://www.mongoc.org/%s/%s/index.html' % (libname, versions[0])
    +        p += nodes.reference('', 'Latest Release (%s)' % versions[0], internal=False, refuri=uri)
             header += p
    -        p = nodes.paragraph("", "")
    -        uri = "https://s3.amazonaws.com/mciuploads/mongo-c-driver/docs/%s/latest/index.html" % (libname)
    -        p += nodes.reference("", "Current Development (master)", internal=False, refuri=uri)
    +        p = nodes.paragraph('', '')
    +        uri = 'https://s3.amazonaws.com/mciuploads/mongo-c-driver/docs/%s/latest/index.html' % (libname)
    +        p += nodes.reference('', 'Current Development (master)', internal=False, refuri=uri)
             header += p
     
             blist = nodes.bullet_list()
             for v in versions:
                 item = nodes.list_item()
    -            p = nodes.paragraph("", "")
    -            uri = "https://www.mongoc.org/%s/%s/index.html" % (libname, v)
    -            p += nodes.reference("", v, internal=False, refuri=uri)
    +            p = nodes.paragraph('', '')
    +            uri = 'https://www.mongoc.org/%s/%s/index.html' % (libname, v)
    +            p += nodes.reference('', v, internal=False, refuri=uri)
                 item += p
                 blist += item
             return [header, blist]
    @@ -140,22 +138,22 @@ def run(self) -> Sequence[Node]:
     
     def generate_html_redirs(app: Sphinx, page: str, templatename: str, context: Dict[str, Any], doctree: Any) -> None:
         builder = app.builder
    -    if not isinstance(builder, DirectoryHTMLBuilder) or "writing-redirect" in context:
    +    if not isinstance(builder, DirectoryHTMLBuilder) or 'writing-redirect' in context:
             return
    -    if page == "index" or page.endswith(".index"):
    +    if page == 'index' or page.endswith('.index'):
             return
         path = app.project.doc2path(page, True)
         out_index_html = Path(builder.get_outfilename(page))
         slug = out_index_html.parent.name
    -    redirect_file = out_index_html.parent.parent / f"{slug}.html"
    +    redirect_file = out_index_html.parent.parent / f'{slug}.html'
         # HACK: handle_page() is not properly reentrant. Save and restore state for
         # this page while we generate our redirects page:
         prev_scripts = builder.script_files[:]
         prev_css = builder.css_files[:]
         builder.handle_page(
    -        f"redirect-for-{page}",
    -        {"target": page, "writing-redirect": 1},
    -        str(Path(__file__).parent.resolve() / "redirect.t.html"),
    +        f'redirect-for-{page}',
    +        {'target': page, 'writing-redirect': 1},
    +        str(Path(__file__).parent.resolve() / 'redirect.t.html'),
             # Note: In Sphinx 8.2, this argument changed from `str` to `Path`, but
             # continues to work with `str`. A future version might need this changed
             # to pass a `Path`, but we can keep `str` for now.
    @@ -164,13 +162,13 @@ def generate_html_redirs(app: Sphinx, page: str, templatename: str, context: Dic
         # Restore prior state:
         builder.script_files[:] = prev_scripts
         builder.css_files[:] = prev_css
    -    sphinx_log.debug("Wrote redirect: %r -> %r", path, page)
    +    sphinx_log.debug('Wrote redirect: %r -> %r', path, page)
     
     
     def mongoc_common_setup(app: Sphinx):
         _collect_man(app)
    -    app.connect("html-page-context", generate_html_redirs)
    -    app.connect("html-page-context", add_ga_javascript)
    +    app.connect('html-page-context', generate_html_redirs)
    +    app.connect('html-page-context', add_ga_javascript)
         # Run sphinx-build -D analytics=1 to enable Google Analytics.
    -    app.add_config_value("analytics", False, "html")
    -    app.add_directive("versionlist", VersionList)
    +    app.add_config_value('analytics', False, 'html')
    +    app.add_directive('versionlist', VersionList)
    diff --git a/docs/dev/conf.py b/docs/dev/conf.py
    index f4a593fa064..fd520f89fd8 100644
    --- a/docs/dev/conf.py
    +++ b/docs/dev/conf.py
    @@ -6,8 +6,8 @@
     # -- Project information -----------------------------------------------------
     # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
     
    -from pathlib import Path
     import re
    +from pathlib import Path
     from typing import Callable
     
     from sphinx import addnodes
    @@ -18,10 +18,10 @@
     THIS_DIR = THIS_FILE.parent
     REPO_ROOT = THIS_DIR.parent.parent
     
    -project = "MongoDB C Driver Development"
    -copyright = "2009-present, MongoDB, Inc."
    -author = "MongoDB, Inc"
    -release = (REPO_ROOT / "VERSION_CURRENT").read_text().strip()
    +project = 'MongoDB C Driver Development'
    +copyright = '2009-present, MongoDB, Inc.'
    +author = 'MongoDB, Inc'
    +release = (REPO_ROOT / 'VERSION_CURRENT').read_text().strip()
     
     # -- General configuration ---------------------------------------------------
     # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
    @@ -29,16 +29,16 @@
     extensions = []
     templates_path = []
     exclude_patterns = []
    -default_role = "any"
    +default_role = 'any'
     
     # -- Options for HTML output -------------------------------------------------
     # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
     
    -html_theme = "nature"
    -pygments_style = "sphinx"
    +html_theme = 'nature'
    +pygments_style = 'sphinx'
     html_static_path = []
     
    -rst_prolog = rf"""
    +rst_prolog = r"""
     .. role:: bash(code)
         :language: bash
     """
    @@ -51,56 +51,50 @@ def annotator(
         Create a parse_node function that adds a parenthesized annotation to an object signature.
         """
     
    -    def parse_node(
    -        env: BuildEnvironment, sig: str, signode: addnodes.desc_signature
    -    ) -> str:
    +    def parse_node(env: BuildEnvironment, sig: str, signode: addnodes.desc_signature) -> str:
             signode += addnodes.desc_name(sig, sig)
             signode += addnodes.desc_sig_space()
    -        signode += addnodes.desc_annotation("", f"({annot})")
    +        signode += addnodes.desc_annotation('', f'({annot})')
             return sig
     
         return parse_node
     
     
    -def parse_earthly_artifact(
    -    env: BuildEnvironment, sig: str, signode: addnodes.desc_signature
    -) -> str:
    +def parse_earthly_artifact(env: BuildEnvironment, sig: str, signode: addnodes.desc_signature) -> str:
         """
         Parse and render the signature of an '.. earthly-artifact::' signature"""
    -    mat = re.match(r"(?P<target>\+.+?)(?P<path>/.*)$", sig)
    +    mat = re.match(r'(?P<target>\+.+?)(?P<path>/.*)$', sig)
         if not mat:
    -        raise RuntimeError(
    -            f"Invalid earthly-artifact signature: {sig!r} (expected “+<target>/<path>” string)"
    -        )
    -    signode += addnodes.desc_addname(mat["target"], mat["target"])
    -    signode += addnodes.desc_name(mat["path"], mat["path"])
    +        raise RuntimeError(f'Invalid earthly-artifact signature: {sig!r} (expected “+<target>/<path>” string)')
    +    signode += addnodes.desc_addname(mat['target'], mat['target'])
    +    signode += addnodes.desc_name(mat['path'], mat['path'])
         signode += addnodes.desc_sig_space()
    -    signode += addnodes.desc_annotation("", "(Earthly Artifact)")
    +    signode += addnodes.desc_annotation('', '(Earthly Artifact)')
         return sig
     
     
     def setup(app: Sphinx):
         app.add_object_type(  # type: ignore
    -        "earthly-target",
    -        "earthly-target",
    -        indextemplate="pair: earthly target; %s",
    -        parse_node=annotator("Earthly target"),
    +        'earthly-target',
    +        'earthly-target',
    +        indextemplate='pair: earthly target; %s',
    +        parse_node=annotator('Earthly target'),
         )
         app.add_object_type(  # type: ignore
    -        "script",
    -        "script",
    -        indextemplate="pair: shell script; %s",
    -        parse_node=annotator("shell script"),
    +        'script',
    +        'script',
    +        indextemplate='pair: shell script; %s',
    +        parse_node=annotator('shell script'),
         )
         app.add_object_type(  # type: ignore
    -        "earthly-artifact",
    -        "earthly-artifact",
    -        indextemplate="pair: earthly artifact; %s",
    +        'earthly-artifact',
    +        'earthly-artifact',
    +        indextemplate='pair: earthly artifact; %s',
             parse_node=parse_earthly_artifact,
         )
         app.add_object_type(  # type: ignore
    -        "file",
    -        "file",
    -        indextemplate="repository file; %s",
    -        parse_node=annotator("repository file"),
    +        'file',
    +        'file',
    +        indextemplate='repository file; %s',
    +        parse_node=annotator('repository file'),
         )
    diff --git a/lldb.pyi b/lldb.pyi
    index 9df00f1776c..6e3082a95d7 100644
    --- a/lldb.pyi
    +++ b/lldb.pyi
    @@ -5,7 +5,7 @@ are used in lldb_bson have been transcribed from the LLDB Python API
     documentation. Refer: https://lldb.llvm.org/python_api.html
     """
     
    -from typing import IO, Any, Sequence, TypeAlias, NoReturn
    +from typing import IO, Any, NoReturn, Sequence, TypeAlias
     
     _Pointer: TypeAlias = int
     _Size: TypeAlias = int
    diff --git a/lldb_bson.py b/lldb_bson.py
    index 4af06ee0fd4..9fbc0273e67 100644
    --- a/lldb_bson.py
    +++ b/lldb_bson.py
    @@ -107,13 +107,13 @@ def _wrap(*args: Any, **kwargs: Any) -> Any:
                 print(e)
                 raise
     
    -    return cast("FuncT", _wrap)
    +    return cast('FuncT', _wrap)
     
     
     @print_errors
     def __lldb_init_module(debugger: SBDebugger, internal_dict: InternalDict):
         # Inject the global magic document traverser:
    -    internal_dict["bson"] = _BSONWalker()
    +    internal_dict['bson'] = _BSONWalker()
         # Register types:
         for cls in _SyntheticMeta.synthetics:
             # The (regex of) the type that is handled by this class:
    @@ -127,34 +127,34 @@ def __lldb_init_module(debugger: SBDebugger, internal_dict: InternalDict):
                 quoted = cls.__summary_str__.replace("'", "\\'")
                 cmd = f"type summary add --summary-string '{quoted}' -x '^{ty}$'"
                 debugger.HandleCommand(cmd)
    -        if hasattr(cls, "__summary__"):
    +        if hasattr(cls, '__summary__'):
                 # More complex: Call a Python function that will create the summary
                 cmd = f"type summary add -F lldb_bson.{cls.__name__}.__summary__ -x '^{ty}$'"
                 debugger.HandleCommand(cmd)
     
         # Render __bson_byte__ as "bytes with ASCII." __bson_byte__ is a
         # debug-only type generated on-the-fly in LLDB
    -    debugger.HandleCommand("type format add -f Y __bson_byte__")
    +    debugger.HandleCommand('type format add -f Y __bson_byte__')
         # Arrays of bytes as a sequence of hex values:
         debugger.HandleCommand(r"type summary add -s '${var[]%x}' -x '__bson_byte__\[[0-9]+\]'")
     
    -    print("lldb_bson is ready")
    +    print('lldb_bson is ready')
     
     
     _ = __lldb_init_module  # Silence "unused function" warnings
     
     
    -FuncT = TypeVar("FuncT", bound=Callable[..., Any])
    -"Type of functions"
    -T = TypeVar("T")
    -"Unbounded invariant type parameter"
    +FuncT = TypeVar('FuncT', bound=Callable[..., Any])
    +'Type of functions'
    +T = TypeVar('T')
    +'Unbounded invariant type parameter'
     InternalDict = Dict[str, Any]
    -"Type of internal dictionaries, provided by LLDB"
    +'Type of internal dictionaries, provided by LLDB'
     
     
     ValueFactory = Callable[[], SBValue]
     ChildItem = Union[
    -    Tuple[str, "str | int"], ValueFactory, Tuple[str, "str | int", "lldb.ValueFormatType|None", "SBType|None"]
    +    Tuple[str, 'str | int'], ValueFactory, Tuple[str, 'str | int', 'lldb.ValueFormatType|None', 'SBType|None']
     ]
     
     
    @@ -165,19 +165,19 @@ class _SyntheticMeta(type):
         """
     
         synthetics: list[Type[SyntheticDisplayBase[Any]]] = []
    -    "The display type classes that have been defined"
    +    'The display type classes that have been defined'
     
         @override
         def __new__(
             cls: Type[_SyntheticMeta], name: str, bases: tuple[type, ...], namespace: dict[str, Any]
         ) -> Type[SyntheticDisplayBase[Any]]:
             new_class: Type[SyntheticDisplayBase[Any]] = type.__new__(cast(type, cls), name, bases, namespace)
    -        if namespace.get("__abstract__"):
    +        if namespace.get('__abstract__'):
                 return new_class
             # Check for the required __typename__ and __parse__
    -        if not hasattr(new_class, "__typename__"):
    +        if not hasattr(new_class, '__typename__'):
                 raise TypeError(f'Type "{new_class}" is missing a "__typename__" attribute')
    -        if not hasattr(new_class, "__parse__"):
    +        if not hasattr(new_class, '__parse__'):
                 raise TypeError(f'Type "{new_class}" has no "__parse__" method')
             # Remember this new class:
             cls.synthetics.append(new_class)
    @@ -186,12 +186,12 @@ def __new__(
     
     class SyntheticDisplayBase(Generic[T], SBSyntheticValueProvider, metaclass=_SyntheticMeta):
         __abstract__: ClassVar[bool] = True
    -    "If true, disables metaclass checks"
    +    'If true, disables metaclass checks'
     
         __summary_str__: ClassVar[str | None] = None
         "Set to an LLDB '--summary-string' formatting string for rendering the inline value summary"
         __enable_synthetic__: ClassVar[bool] = True
    -    "If False, do not generate synthetic children (used for primitive values)"
    +    'If False, do not generate synthetic children (used for primitive values)'
     
         if TYPE_CHECKING:
             __typename__: ClassVar[str]
    @@ -223,16 +223,16 @@ def __get_sbtype__(cls, frame: SBFrame, addr: int) -> SBType:
             Obtain the SBType for this class. Can be overriden in subclasses, and
             the type may consider the value that lives at the address.
             """
    -        return generate_or_get_type(f"struct {cls.__typename__} {{}}", frame)
    +        return generate_or_get_type(f'struct {cls.__typename__} {{}}', frame)
     
         @print_errors
         def __init__(self, val: SBValue, idict: InternalDict | None = None) -> None:
             self.__sbvalue = val
    -        "The SBValue given for this object"
    +        'The SBValue given for this object'
             self.__children: list[ChildItem] = []
    -        "The synthetic children associated with the value"
    +        'The synthetic children associated with the value'
             self.__value: T | None = None
    -        "The decoded value, or ``None`` if it has not yet been decoded"
    +        'The decoded value, or ``None`` if it has not yet been decoded'
     
         @property
         def sbvalue(self) -> SBValue:
    @@ -285,7 +285,7 @@ def get_child_at_index(self, pos: int) -> SBValue:
             """
             # LLDB sometimes calls us with a child that we don't have?
             if pos >= len(self.__children):
    -            print(f"NOTE: lldb called get_child_at_index({pos}), but we only have {len(self.__children)} children")
    +            print(f'NOTE: lldb called get_child_at_index({pos}), but we only have {len(self.__children)} children')
                 return SBValue()
             # Get the child:
             nth = self.__children[pos]
    @@ -293,7 +293,7 @@ def get_child_at_index(self, pos: int) -> SBValue:
             if not isinstance(nth, tuple):
                 # The type is a ValueFactory, which will return a new SBValue
                 val = nth()
    -            assert val.error.success, f"{val.error=}, {nth=}, {pos=}"
    +            assert val.error.success, f'{val.error=}, {nth=}, {pos=}'
                 return val
             # Otherwise, they yielded a tuple:
             if len(nth) == 4:
    @@ -338,7 +338,7 @@ class PrimitiveDisplay(Generic[T], SyntheticDisplayBase[T]):
         __enable_synthetic__: ClassVar[bool] = False
     
         __struct_format__: ClassVar[str]
    -    "The struct format string that will be used to extract the value from memory"
    +    'The struct format string that will be used to extract the value from memory'
     
         @classmethod
         @override
    @@ -358,15 +358,15 @@ def __parse__(cls, value: SBValue) -> T:
     class DoubleDisplay(PrimitiveDisplay[float]):
         """Displays BSON doubles"""
     
    -    __typename__ = "__bson_double__"
    -    __struct_format__: ClassVar[str] = " bytes:
         @override
         def get_children(self) -> Iterable[ChildItem]:
             strlen = len(self.value)
    -        yield "size (bytes)", strlen
    +        yield 'size (bytes)', strlen
             # Create a char[] type to represent the string content:
             array_t = self.sbvalue.target.GetBasicType(lldb.eBasicTypeChar).GetArrayType(strlen)
    -        yield lambda: self.sbvalue.synthetic_child_from_address("[content]", self.address + 4, array_t)
    +        yield lambda: self.sbvalue.synthetic_child_from_address('[content]', self.address + 4, array_t)
             try:
                 # Attempt a UTF-8 decode. We don't actually show this, we just want to
                 # check if there are encoding errors, which we will display in the output
    -            self.value.decode("utf-8")
    +            self.value.decode('utf-8')
             except UnicodeDecodeError as e:
    -            yield "decode error", str(e)
    +            yield 'decode error', str(e)
     
     
     class DocumentInfo(NamedTuple):
         """A decoded document"""
     
         elements: Sequence[DocumentElement | DocumentError]
    -    "Existing elements or errors found while parsing the data"
    +    'Existing elements or errors found while parsing the data'
     
     
     class DocumentElement(NamedTuple):
    @@ -415,7 +415,7 @@ class DocumentError(NamedTuple):
         error_offset: int
     
     
    -class DocumentDisplay(SyntheticDisplayBase["DocumentInfo | DocumentError"]):
    +class DocumentDisplay(SyntheticDisplayBase['DocumentInfo | DocumentError']):
         """
         Main display of BSON document elements. This parses a document/array, and
         generates the child elements that can be further expanded and inspected.
    @@ -424,8 +424,8 @@ class DocumentDisplay(SyntheticDisplayBase["DocumentInfo | DocumentError"]):
         the top-level object and is the one responsible for filling the cache.
         """
     
    -    __typename__ = "__bson_document_[0-9]+__"
    -    __qualifier__: ClassVar[str] = "document"
    +    __typename__ = '__bson_document_[0-9]+__'
    +    __qualifier__: ClassVar[str] = 'document'
         "The 'qualifier' of this type. Overriden by ArrayDisplay."
     
         @classmethod
    @@ -435,12 +435,12 @@ def __summary__(cls, value: SBValue, idict: InternalDict) -> str:
             prefix = cls.__qualifier__
             doc = cls.__parse__(value)
             if isinstance(doc, DocumentError):
    -            return f"Error parsing {prefix} at byte {doc.error_offset}: {doc.message}"
    +            return f'Error parsing {prefix} at byte {doc.error_offset}: {doc.message}'
             if len(doc.elements) == 0:
    -            return f"{prefix} (empty)"
    +            return f'{prefix} (empty)'
             if len(doc.elements) == 1:
    -            return f"{prefix} (1 element)"
    -        return f"{prefix} ({len(doc.elements)} elements)"
    +            return f'{prefix} (1 element)'
    +        return f'{prefix} ({len(doc.elements)} elements)'
     
         @classmethod
         @override
    @@ -449,10 +449,10 @@ def __get_sbtype__(cls, frame: SBFrame, addr: int) -> SBType:
             # Read the size prefix:
             err = SBError()
             header = frame.thread.process.ReadMemory(addr, 4, err)
    -        assert err.success, f"{err=}, {frame=}, {addr=}"
    +        assert err.success, f'{err=}, {frame=}, {addr=}'
             size = read_i32le(header)
             # Generate the type:
    -        typename = f"__bson_{cls.__qualifier__}_{size}__"
    +        typename = f'__bson_{cls.__qualifier__}_{size}__'
             doc_t = generate_or_get_type(
                 f"""
                 enum __bson_byte__ : unsigned char {{}};
    @@ -471,7 +471,7 @@ def __parse__(cls, value: SBValue) -> DocumentInfo | DocumentError:
                 # will be pulled here:
                 buf = memcache.read(value)[1]
             except LookupError as e:
    -            return DocumentError(f"Failed to read memory: {e}", value.load_addr)
    +            return DocumentError(f'Failed to read memory: {e}', value.load_addr)
             return cls.parse_bytes(buf)
     
         @classmethod
    @@ -497,12 +497,12 @@ def _parse_elems(cls, buf: bytes) -> Iterable[DocumentElement | DocumentError]:
                 # Yield this one, and then advance to the next element:
                 yield elem
                 elem_size = 1 + len(elem.key) + 1 + elem.value_size
    -            if cls.__qualifier__ == "array":
    +            if cls.__qualifier__ == 'array':
                     # Validate that array keys are increasing integers:
                     expect_key = str(array_idx)
                     if elem.key != expect_key:
                         yield DocumentError(
    -                        f"Array element must have incrementing integer keys "
    +                        f'Array element must have incrementing integer keys '
                             f'(Expected "{expect_key}", got "{elem.key}")',
                             cur_offset,
                         )
    @@ -511,7 +511,7 @@ def _parse_elems(cls, buf: bytes) -> Iterable[DocumentElement | DocumentError]:
             # Check that we actually consumed the whole buffer:
             remain = len(buf) - cur_offset
             if remain > 1:
    -            yield DocumentError(f"Extra {len(buf)} bytes in document data", cur_offset)
    +            yield DocumentError(f'Extra {len(buf)} bytes in document data', cur_offset)
     
         @classmethod
         def _parse_one(
    @@ -522,24 +522,24 @@ def _parse_one(
                 type_tag = BSONType(buf[0])
             except ValueError:
                 # The tag byte is not a valid tag value
    -            return DocumentError(f"Invalid element type tag 0x{buf[0]:x}", elem_offset)
    +            return DocumentError(f'Invalid element type tag 0x{buf[0]:x}', elem_offset)
             except IndexError:
                 # 'buf' was empty
    -            return DocumentError(f"Unexpected end-of-data", elem_offset)
    +            return DocumentError('Unexpected end-of-data', elem_offset)
             # Stop if this is the end:
             if type_tag == BSONType.EOD:
    -            return DocumentElement(type_tag, "", 0, 0)
    +            return DocumentElement(type_tag, '', 0, 0)
             # Find the null terminator on the key:
             try:
                 key_nulpos = buf.index(0, 1)
             except ValueError:
    -            return DocumentError(f"Unexpected end-of-data while parsing the element key", elem_offset)
    +            return DocumentError('Unexpected end-of-data while parsing the element key', elem_offset)
             key_bytes = buf[1:key_nulpos]
             try:
    -            key = key_bytes.decode("utf-8")
    +            key = key_bytes.decode('utf-8')
             except UnicodeDecodeError as e:
    -            yield DocumentError(f"Element key {key_bytes} is not valid UTF-8 ({e})", elem_offset)
    -            key = key_bytes.decode("utf-8", errors="replace")
    +            yield DocumentError(f'Element key {key_bytes} is not valid UTF-8 ({e})', elem_offset)
    +            key = key_bytes.decode('utf-8', errors='replace')
             # The offset of the value within the element:
             inner_offset = key_nulpos + 1
             # The buffer that starts at the value:
    @@ -580,7 +580,7 @@ def _parse_one(
                 # Size is a length prefix, plus four, plus one for the subtype
                 value_size = read_i32le(value_bytes) + 4 + 1
             else:
    -            assert False, f"Unhandled value tag? {type_tag=} {buf=} {key=}"
    +            assert False, f'Unhandled value tag? {type_tag=} {buf=} {key=}'
             # The absolute offset of the element within the parent document:
             value_offset = elem_offset + inner_offset
             return DocumentElement(type_tag, key, value_offset, value_size)
    @@ -590,12 +590,12 @@ def get_children(self) -> Iterable[ChildItem]:
             doc = self.value
             if isinstance(doc, DocumentError):
                 # The entire document failed to parse. Just generate one error:
    -            yield "[error]", f"Parsing error at byte {doc.error_offset}: {doc.message}"
    +            yield '[error]', f'Parsing error at byte {doc.error_offset}: {doc.message}'
                 return
             for elem in doc.elements:
                 if isinstance(elem, DocumentError):
                     # There was an error at this location.
    -                yield "[error]", f"Data error at offset {elem.error_offset}: {elem.message}"
    +                yield '[error]', f'Data error at offset {elem.error_offset}: {elem.message}'
                 else:
                     # Create a ValueFactory for each element:
                     yield functools.partial(self.create_child, self.sbvalue, elem)
    @@ -603,9 +603,9 @@ def get_children(self) -> Iterable[ChildItem]:
         @classmethod
         def create_child(cls, parent: SBValue, elem: DocumentElement) -> SBValue:
             """Generate the child elements for LLDB to walk through"""
    -        if cls.__qualifier__ == "array":
    +        if cls.__qualifier__ == 'array':
                 # Don't quote the integer keys
    -            name = f"[{elem.key}]"
    +            name = f'[{elem.key}]'
             else:
                 name = f"['{elem.key}']"
             value_addr = parent.load_addr + elem.value_offset
    @@ -637,20 +637,20 @@ def create_child(cls, parent: SBValue, elem: DocumentElement) -> SBValue:
                 BSONType.MinKey: MinKeyDisplay.__get_sbtype__,
             }
             get_type = by_type.get(elem.type)
    -        assert get_type is not None, f"Unhandled type tag? {elem=}"
    +        assert get_type is not None, f'Unhandled type tag? {elem=}'
             # Create the SBType:
             type = get_type(frame, value_addr)
             # Create a synthetic child of that type at the address of the element's value:
             val = parent.synthetic_child_from_address(name, value_addr, type)
    -        assert val.error.success, f"{elem=}, {val.error=}"
    +        assert val.error.success, f'{elem=}, {val.error=}'
             return val
     
     
     class ArrayDisplay(DocumentDisplay):
         """Display for arrays. Most logic is implemented in the DocumentDisplay base."""
     
    -    __typename__ = "__bson_array_[0-9]+__"
    -    __qualifier__: ClassVar[str] = "array"
    +    __typename__ = '__bson_array_[0-9]+__'
    +    __qualifier__: ClassVar[str] = 'array'
     
     
     class BinaryInfo(NamedTuple):
    @@ -661,7 +661,7 @@ class BinaryInfo(NamedTuple):
     class BinaryDisplay(SyntheticDisplayBase[BinaryInfo]):
         """Display for a BSON binary value"""
     
    -    __typename__ = "__bson_binary__"
    +    __typename__ = '__bson_binary__'
     
         @classmethod
         @override
    @@ -677,11 +677,11 @@ def __parse__(cls, value: SBValue) -> BinaryInfo:
     
         @override
         def get_children(self) -> Iterable[ChildItem]:
    -        yield "size", len(self.value.data)
    -        byte_t = generate_or_get_type("enum __bson_byte__ : char {}", self.sbvalue.frame)
    -        yield "subtype", self.value.subtype, lldb.eFormatHex, byte_t
    +        yield 'size', len(self.value.data)
    +        byte_t = generate_or_get_type('enum __bson_byte__ : char {}', self.sbvalue.frame)
    +        yield 'subtype', self.value.subtype, lldb.eFormatHex, byte_t
             array_t = byte_t.GetArrayType(len(self.value.data))
    -        yield lambda: self.sbvalue.synthetic_child_from_address("data", self.address + 5, array_t)
    +        yield lambda: self.sbvalue.synthetic_child_from_address('data', self.address + 5, array_t)
     
     
     class UndefinedDisplay(SyntheticDisplayBase[None]):
    @@ -689,8 +689,8 @@ class UndefinedDisplay(SyntheticDisplayBase[None]):
         Display type for 'undefined' values. Also derived from for other unit types.
         """
     
    -    __typename__ = "__bson_undefined__"
    -    __summary_str__ = "undefined"
    +    __typename__ = '__bson_undefined__'
    +    __summary_str__ = 'undefined'
         __enable_synthetic__: ClassVar[bool] = False
     
         @classmethod
    @@ -702,7 +702,7 @@ def __parse__(cls, value: SBValue) -> None:
     class ObjectIDDisplay(SyntheticDisplayBase[bytes]):
         """Display type for ObjectIDs"""
     
    -    __typename__ = "__bson_objectid__"
    +    __typename__ = '__bson_objectid__'
     
         @classmethod
         @override
    @@ -729,20 +729,20 @@ def __parse__(cls, value: SBValue) -> bytes:
     
         @override
         def get_children(self) -> Iterable[ChildItem]:
    -        yield "spelling", self.value.hex()
    +        yield 'spelling', self.value.hex()
     
     
     class DatetimeDisplay(SyntheticDisplayBase[int]):
         """Display for BSON Datetime objects"""
     
    -    __typename__ = "__bson_datetime__"
    -    __summary_str__: ClassVar[str] = "datetime: ${var[0]}"
    +    __typename__ = '__bson_datetime__'
    +    __summary_str__: ClassVar[str] = 'datetime: ${var[0]}'
     
         @classmethod
         @override
         def __summary__(cls, value: SBValue, idict: InternalDict) -> str:
             dt = datetime.fromtimestamp(cls.__parse__(value) / 1000)
    -        s = f"{dt:%a %b %m %Y %H:%M:%S +%fμs}"
    +        s = f'{dt:%a %b %m %Y %H:%M:%S +%fμs}'
             return f'Date("{s}")'
     
         @classmethod
    @@ -750,7 +750,7 @@ def __summary__(cls, value: SBValue, idict: InternalDict) -> str:
         def __parse__(cls, val: SBValue) -> int:
             buf = memcache.get_cached(val.load_addr)
             buf = buf[:8]
    -        value: int = struct.unpack(" Iterable[ChildItem]:
             # Adjusted to the local time zone:
             adjusted = dt.astimezone()
             yield from {
    -            "[isoformat]": dt.isoformat(),
    -            "[date]": f"{dt:%B %d, %Y}",
    -            "[time]": dt.strftime("%H:%M:%S +%fμs"),
    -            "[local]": adjusted.strftime("%c"),
    -            "Year": dt.year,
    -            "Month": dt.month,
    -            "Day": dt.day,
    -            "Hour": dt.hour,
    -            "Minute": dt.minute,
    -            "Second": dt.second,
    -            "+μs": dt.microsecond,
    +            '[isoformat]': dt.isoformat(),
    +            '[date]': f'{dt:%B %d, %Y}',
    +            '[time]': dt.strftime('%H:%M:%S +%fμs'),
    +            '[local]': adjusted.strftime('%c'),
    +            'Year': dt.year,
    +            'Month': dt.month,
    +            'Day': dt.day,
    +            'Hour': dt.hour,
    +            'Minute': dt.minute,
    +            'Second': dt.second,
    +            '+μs': dt.microsecond,
             }.items()
     
     
     class NullDisplay(UndefinedDisplay):
         """Display for the BSON 'null' type"""
     
    -    __typename__ = "__bson_null__"
    -    __summary_str__ = "null"
    +    __typename__ = '__bson_null__'
    +    __summary_str__ = 'null'
     
     
     class RegexDisplay(SyntheticDisplayBase[Tuple[bytes, bytes]]):
         """Display type for BSON regular expressions"""
     
    -    __typename__ = "__bson_regex_[0-9]+_[0-9]+__"
    +    __typename__ = '__bson_regex_[0-9]+_[0-9]+__'
         __enable_synthetic__: ClassVar[bool] = False
     
         @classmethod
    @@ -815,8 +815,8 @@ def __summary__(cls, value: SBValue, idict: InternalDict) -> str:
             # Create a JS-style regex literal:
             pair = cls.__parse__(value)
             regex, options = cls.decode_pair(pair)
    -        regex = regex.replace("/", "\\/").replace("\n", "\\n")
    -        return f"/{regex}/{options}"
    +        regex = regex.replace('/', '\\/').replace('\n', '\\n')
    +        return f'/{regex}/{options}'
     
         @classmethod
         def parse_at(cls, addr: int) -> tuple[bytes, bytes]:
    @@ -833,16 +833,16 @@ def parse_at(cls, addr: int) -> tuple[bytes, bytes]:
         @classmethod
         def decode_pair(cls, value: tuple[bytes, bytes]) -> tuple[str, str]:
             regex, options = value
    -        regex = regex.decode("utf-8", errors="replace")
    -        options = options.decode("utf-8", errors="replace")
    +        regex = regex.decode('utf-8', errors='replace')
    +        options = options.decode('utf-8', errors='replace')
             return regex, options
     
     
     class DBPointerDisplay(SyntheticDisplayBase[Tuple[bytes, int]]):
         """Display type for DBPointers"""
     
    -    __typename__ = "__bson_dbpointer__"
    -    __summary_str__: ClassVar[str | None] = "DBPointer(${var[0]}, ${var[1]})"
    +    __typename__ = '__bson_dbpointer__'
    +    __summary_str__: ClassVar[str | None] = 'DBPointer(${var[0]}, ${var[1]})'
     
         @classmethod
         @override
    @@ -858,36 +858,36 @@ def get_children(self) -> Iterable[ChildItem]:
             utf8_t = UTF8Display.__get_sbtype__(self.sbvalue.frame, self.address)
             oid_offset = self.value[1]
             oid_t = ObjectIDDisplay.__get_sbtype__(self.sbvalue.frame, self.address + oid_offset)
    -        yield lambda: self.sbvalue.synthetic_child_from_address("collection", self.sbvalue.load_addr, utf8_t)
    -        yield lambda: self.sbvalue.synthetic_child_from_address("object", self.sbvalue.load_addr + oid_offset, oid_t)
    +        yield lambda: self.sbvalue.synthetic_child_from_address('collection', self.sbvalue.load_addr, utf8_t)
    +        yield lambda: self.sbvalue.synthetic_child_from_address('object', self.sbvalue.load_addr + oid_offset, oid_t)
     
     
     class CodeDisplay(UTF8Display):
         """Display type for BSON code"""
     
    -    __typename__ = "__bson_code__"
    -    __summary_str__ = "Code(${var[1]})"
    +    __typename__ = '__bson_code__'
    +    __summary_str__ = 'Code(${var[1]})'
     
     
     class SymbolDisplay(UTF8Display):
         """Display type for BSON symbols"""
     
    -    __typename__ = "__bson_symbol__"
    +    __typename__ = '__bson_symbol__'
     
         @classmethod
         @override
         def __summary__(cls, value: SBValue, idict: InternalDict) -> str:
             spell = cls.__parse__(value)
    -        dec = spell.decode("utf-8", errors="replace").rstrip("\x00")
    -        return f"Symbol({dec})"
    +        dec = spell.decode('utf-8', errors='replace').rstrip('\x00')
    +        return f'Symbol({dec})'
     
     
     class CodeWithScopeDisplay(SyntheticDisplayBase[int]):
         """Display type for BSON 'Code w/ Scope'"""
     
    -    __typename__ = "__code_with_scope__"
    +    __typename__ = '__code_with_scope__'
     
    -    __summary_str__: ClassVar[str | None] = "Code(${var[0][1]}, ${var[1]})"
    +    __summary_str__: ClassVar[str | None] = 'Code(${var[0][1]}, ${var[1]})'
     
         @classmethod
         @override
    @@ -903,54 +903,54 @@ def get_children(self) -> Iterable[ChildItem]:
             code_t = CodeDisplay.__get_sbtype__(self.sbvalue.frame, self.address)
             scope_doc_offset = self.value
             doc_t = DocumentDisplay.__get_sbtype__(self.sbvalue.frame, self.address + scope_doc_offset)
    -        yield lambda: checked(self.sbvalue.synthetic_child_from_address("code", self.address + 4, code_t))
    +        yield lambda: checked(self.sbvalue.synthetic_child_from_address('code', self.address + 4, code_t))
             yield lambda: checked(
    -            self.sbvalue.synthetic_child_from_address("scope", self.address + scope_doc_offset, doc_t)
    +            self.sbvalue.synthetic_child_from_address('scope', self.address + scope_doc_offset, doc_t)
             )
     
     
     class Int32Display(PrimitiveDisplay[int]):
         """Display for 32-bit BSON integers"""
     
    -    __typename__ = "__bson_int32__"
    -    __struct_format__: ClassVar[str] = " str:
    -        return f"NumberInt({cls.__parse__(value)})"
    +        return f'NumberInt({cls.__parse__(value)})'
     
     
     class Int64Display(PrimitiveDisplay[int]):
         """Display for 64-bit BSON integers"""
     
    -    __typename__ = "__bson_int64__"
    -    __struct_format__: ClassVar[str] = " str:
    -        return f"NumberLong({cls.__parse__(value)})"
    +        return f'NumberLong({cls.__parse__(value)})'
     
     
     class TimestampDisplay(SyntheticDisplayBase[Tuple[int, int]]):
         """Display type for BSON timestamps"""
     
    -    __typename__ = "__bson_timestamp__"
    -    __summary_str__ = "Timestamp(${var[0]}, ${var[1]})"
    +    __typename__ = '__bson_timestamp__'
    +    __summary_str__ = 'Timestamp(${var[0]}, ${var[1]})'
     
         @classmethod
         @override
         def __parse__(cls, value: SBValue) -> tuple[int, int]:
             buf = memcache.get_cached(value.load_addr)[:8]
             # Just two 32bit integers:
    -        timestamp, increment = struct.unpack(" Iterable[ChildItem]:
    -        yield "timestamp", self.value[0]
    -        yield "increment", self.value[1]
    +        yield 'timestamp', self.value[0]
    +        yield 'increment', self.value[1]
     
     
     class Decimal128Value(NamedTuple):
    @@ -966,7 +966,7 @@ class Decimal128Value(NamedTuple):
     class Decimal128Display(SyntheticDisplayBase[Decimal128Value]):
         """The display type for BSON's Decimal128 type"""
     
    -    __typename__ = "__bson_decimal128__"
    +    __typename__ = '__bson_decimal128__'
     
         @classmethod
         @override
    @@ -1002,8 +1002,11 @@ def __parse__(cls, value: SBValue) -> Decimal128Value:
             d128_tetra = (hi_word << 64) | low_word
             # Create an array of individual bits (high bits first):
             bits = tuple(((d128_tetra >> n) & 1) for n in range(127, -1, -1))
    +
             # Recombine a sequence of bits into an int (high bits first)
    -        mergebits: Callable[[tuple[int, ...]], int] = lambda bs: functools.reduce(lambda a, b: (a << 1) | b, bs, 0)
    +        def mergebits(bs: tuple[int, ...]) -> int:
    +            return functools.reduce(lambda a, b: (a << 1) | b, bs, 0)
    +
             # Sign bit:
             sign = bits[0]
             # BID uses the first two combo bits to indicate that the exponent is shifted
    @@ -1025,11 +1028,11 @@ def __parse__(cls, value: SBValue) -> Decimal128Value:
                 # Check for special values in the remainder of the combination:
                 more = bits[3:6]
                 if more == (1, 0, 0) or more == (1, 0, 1):
    -                spelling = "Infinity"
    +                spelling = 'Infinity'
                 elif more == (1, 1, 0):
    -                spelling = "NaN (quiet)"
    +                spelling = 'NaN (quiet)'
                 elif more == (1, 1, 1):
    -                spelling = "NaN (signaling)"
    +                spelling = 'NaN (signaling)'
     
             coeff = mergebits(coeff)
             exponent = mergebits(exponent)
    @@ -1042,12 +1045,12 @@ def __parse__(cls, value: SBValue) -> Decimal128Value:
                     spelling = spelling.zfill(abs(e))
                     split = len(spelling) + e
                     w, fr = spelling[:split], spelling[split:]
    -                spelling = f"{w}.{fr}"
    +                spelling = f'{w}.{fr}'
                 else:
    -                spelling = spelling + "0" * e
    +                spelling = spelling + '0' * e
     
             if sign:
    -            spelling = f"-{spelling}"
    +            spelling = f'-{spelling}'
     
             # The "combination" bits
             combination = mergebits(bits[1:18])
    @@ -1055,52 +1058,52 @@ def __parse__(cls, value: SBValue) -> Decimal128Value:
     
         @override
         def get_children(self) -> Iterable[ChildItem]:
    -        yield "sign", self.value.sign
    -        yield "combination", self.value.combination, lldb.eFormatBinary, None
    -        yield "exponent (biased)", self.value.exponent
    -        yield "exponent (actual)", self.value.exponent - 6176
    -        yield "significand", str(self.value.significand)
    -        yield "value", self.value.spelling
    +        yield 'sign', self.value.sign
    +        yield 'combination', self.value.combination, lldb.eFormatBinary, None
    +        yield 'exponent (biased)', self.value.exponent
    +        yield 'exponent (actual)', self.value.exponent - 6176
    +        yield 'significand', str(self.value.significand)
    +        yield 'value', self.value.spelling
     
     
     class MaxKeyDisplay(NullDisplay):
         """The display type for BSON's 'max key' type"""
     
    -    __typename__ = "__bson_maxkey__"
    -    __summary_str__ = "max key"
    +    __typename__ = '__bson_maxkey__'
    +    __summary_str__ = 'max key'
     
     
     class MinKeyDisplay(NullDisplay):
         """The display type for BSON's 'min key' type"""
     
    -    __typename__ = "__bson_minkey__"
    -    __summary_str__ = "min key"
    +    __typename__ = '__bson_minkey__'
    +    __summary_str__ = 'min key'
     
     
     class BSONTInfo(NamedTuple):
         """Information about a bson_t object"""
     
         addr: int
    -    "The address of the pointer to the beginning of the BSON data managed by this object"
    +    'The address of the pointer to the beginning of the BSON data managed by this object'
         size: int
    -    "The size of the BSON data managed/referenced by this object"
    +    'The size of the BSON data managed/referenced by this object'
         flags: int
    -    "Flags of the bson_t object"
    +    'Flags of the bson_t object'
     
     
     class BSONTError(NamedTuple):
         """Represents an error while reading a bson_t object"""
     
         reason: str
    -    "A description of the error that ocurred"
    +    'A description of the error that ocurred'
     
     
    -class BSONTDisplay(SyntheticDisplayBase["BSONTInfo | BSONTError"]):
    +class BSONTDisplay(SyntheticDisplayBase['BSONTInfo | BSONTError']):
         """
         Implements inspection logic for bson_t
         """
     
    -    __typename__ = "bson_t"
    +    __typename__ = 'bson_t'
     
         @classmethod
         @override
    @@ -1117,42 +1120,42 @@ def __parse__(cls, value: SBValue) -> BSONTInfo | BSONTError:
             err = SBError()
             flags = dat.GetUnsignedInt32(err, 0)
             if err.fail:
    -            return BSONTError(f"Failed to read memory at 0x{value.load_addr:x}: {err.description}")
    +            return BSONTError(f'Failed to read memory at 0x{value.load_addr:x}: {err.description}')
             length = dat.GetUnsignedInt32(err, 4)
             if err.fail:
    -            return BSONTError(f"Failed to read memory at 0x{value.load_addr:x}: {err.description}")
    +            return BSONTError(f'Failed to read memory at 0x{value.load_addr:x}: {err.description}')
     
             # Check bogus values:
             MAX_SIZE = 16 * 1024 * 1024
             ALL_FLAGS = (1 << 6) - 1
             if flags & ~ALL_FLAGS or length < 5 or length > MAX_SIZE:
    -            return BSONTError(f"bson_t appears uninitialized/invalid [a] {flags=} {length=}")
    +            return BSONTError(f'bson_t appears uninitialized/invalid [a] {flags=} {length=}')
     
             is_inline = bool(flags & 1)
     
             if is_inline:
                 # Inline objects may only occupy 120 bytes, at most
                 if length > 120:
    -                return BSONTError("bson_t appears uninitialized/invalid [b]")
    +                return BSONTError('bson_t appears uninitialized/invalid [b]')
                 # Look for debug info for the inline impl
    -            inline_t = value.target.FindFirstType("bson_impl_inline_t")
    +            inline_t = value.target.FindFirstType('bson_impl_inline_t')
                 if inline_t:
                     as_inline = value.Cast(inline_t)
    -                ptr = as_inline.GetChildMemberWithName("data").load_addr
    +                ptr = as_inline.GetChildMemberWithName('data').load_addr
                 else:
                     # No debug info? Guess its location as the default
                     ptr = value.load_addr + 4 + 4
                 if not err.success:
    -                return BSONTError(f"Failed to read inline bson_t data: {err}")
    +                return BSONTError(f'Failed to read inline bson_t data: {err}')
                 return BSONTInfo(ptr, length, flags)
     
             # Look for impl_alloc_t
    -        alloc_t = value.target.FindFirstType("bson_impl_alloc_t")
    +        alloc_t = value.target.FindFirstType('bson_impl_alloc_t')
             if alloc_t:
                 alloc = value.Cast(alloc_t)
                 # Walk to the buffer for this value:
    -            offset = alloc.GetChildMemberWithName("offset").unsigned
    -            buf = alloc.GetChildMemberWithName("buf").deref.deref
    +            offset = alloc.GetChildMemberWithName('offset').unsigned
    +            buf = alloc.GetChildMemberWithName('buf').deref.deref
                 ptr = buf.load_addr + offset
                 return BSONTInfo(ptr, length, flags)
     
    @@ -1168,48 +1171,48 @@ def __parse__(cls, value: SBValue) -> BSONTInfo | BSONTError:
             offset_off = buf_off + (ptr_size * 2)
             offset = dat.GetUnsignedInt32(err, offset_off)
             if not err.success:
    -            return BSONTError(f"Failed to read offset of buffer: {err}")
    -        bufptr = value.CreateChildAtOffset("buf", buf_off, u8ptr_t.GetPointerType()).deref
    +            return BSONTError(f'Failed to read offset of buffer: {err}')
    +        bufptr = value.CreateChildAtOffset('buf', buf_off, u8ptr_t.GetPointerType()).deref
             if not bufptr.error.success:
    -            return BSONTError(f"Failed to read the alloc buf: {bufptr.error} {offset=} {buf_off=}")
    +            return BSONTError(f'Failed to read the alloc buf: {bufptr.error} {offset=} {buf_off=}')
             ptr = bufptr.data.GetUnsignedInt64(err, 0)
             assert err.success, err
     
    -        u32_t = value.target.FindFirstType("uint32_t")
    +        u32_t = value.target.FindFirstType('uint32_t')
             addr = SBAddress()
             addr.SetLoadAddress(ptr, value.target)
     
    -        u32 = value.target.CreateValueFromAddress("tmp", addr, u32_t)
    +        u32 = value.target.CreateValueFromAddress('tmp', addr, u32_t)
             assert u32.error.success, u32
             if u32.unsigned != length or length < 5:
    -            return BSONTError(f"bson_t appears uninitialized/invalid [c] {flags=} {length=} {u32.unsigned=}")
    +            return BSONTError(f'bson_t appears uninitialized/invalid [c] {flags=} {length=} {u32.unsigned=}')
             return BSONTInfo(ptr, length, flags)
     
         @override
         def get_children(self) -> Iterable[ChildItem]:
             val = self.value
             if isinstance(val, BSONTError):
    -            yield "[error]", val.reason
    +            yield '[error]', val.reason
                 return
     
             # Imbue the flags with the possible debug info to give it a nice rendering
    -        flags_t = self.sbvalue.target.FindFirstType("bson_flags_t")
    +        flags_t = self.sbvalue.target.FindFirstType('bson_flags_t')
             if flags_t.IsValid():
    -            yield "flags", val.flags, None, flags_t
    +            yield 'flags', val.flags, None, flags_t
             else:
    -            yield "flags", val.flags
    -        yield "data size", val.size
    +            yield 'flags', val.flags
    +        yield 'data size', val.size
             ptr_t = self.sbvalue.target.GetBasicType(lldb.eBasicTypeVoid).GetPointerType()
    -        yield "data address", val.addr, lldb.eFormatPointer, ptr_t
    +        yield 'data address', val.addr, lldb.eFormatPointer, ptr_t
     
             # Generate the __bson_document_xxx__ that will allow walking the document:
             doc_t = DocumentDisplay.__get_sbtype__(self.sbvalue.frame, val.addr)
    -        yield lambda: checked(self.sbvalue.synthetic_child_from_address("[content]", val.addr, doc_t))
    +        yield lambda: checked(self.sbvalue.synthetic_child_from_address('[content]', val.addr, doc_t))
     
     
     def checked(val: SBValue) -> SBValue:
         """Assert that ``val`` is valid. Returns ``val``"""
    -    assert val.error.success, f"{val=} {val.error=}"
    +    assert val.error.success, f'{val=} {val.error=}'
         return val
     
     
    @@ -1217,11 +1220,11 @@ def read_i32le(dat: bytes) -> int:
         """Read a 32-bit integer from the given data."""
         # Truncate before the read:
         buf = dat[0:4]
    -    return struct.unpack(" SBType:
    @@ -1254,8 +1257,8 @@ def generate_or_get_type(expr_prefix: str, frame: SBFrame) -> SBType:
         # Create a new temporary object. Give it a unique name to prevent it from
         # colliding with any possible temporaries we may have generated previously.
         hash = hashlib.md5(expr_prefix.encode()).hexdigest()
    -    varname = f"__bson_lldb_tmp_{hash}"
    -    full_expr = f"{expr_prefix} {varname}; {varname}"
    +    varname = f'__bson_lldb_tmp_{hash}'
    +    full_expr = f'{expr_prefix} {varname}; {varname}'
         tmp = frame.EvaluateExpression(full_expr)
         existing = tmp.type
         _types_cache[cachekey] = existing
    @@ -1307,11 +1310,11 @@ def __rmatmul__(self, lhs: Any) -> SBValue | str:
                 # Evaluate the left-hand string as an expression within the target
                 target = lldb.debugger.GetSelectedTarget()
                 if target is None:
    -                raise RuntimeError("Not attached to a debug target")
    +                raise RuntimeError('Not attached to a debug target')
                 frame = target.process.selected_thread.frames[0]
                 lhs = frame.EvaluateExpression(lhs)
             val: SBValue
    -        if hasattr(lhs.__class__, "unwrap"):
    +        if hasattr(lhs.__class__, 'unwrap'):
                 # CodeLLDB gives us a wrapper around SBValue, but we want the unwrapped
                 # version:
                 val = lhs.__class__.unwrap(lhs)
    @@ -1333,14 +1336,14 @@ def __rmatmul__(self, lhs: Any) -> SBValue | str:
             # Create the synthetic __bson_document_xxx__ object for this doc
             doc_t = DocumentDisplay.__get_sbtype__(val.frame, as_bson.addr)
             # Obtain a value reference to the document data:
    -        retval = val.CreateValueFromAddress("[root]", as_bson.addr, doc_t)
    +        retval = val.CreateValueFromAddress('[root]', as_bson.addr, doc_t)
     
             # Now resolve the path:
             for part in self._path:
                 if isinstance(part, str):
                     # Access via ``p['foo']`` or ``p.foo``, requires our current node
                     # to be a document:
    -                if not retval.type.name.startswith("__bson_document_"):
    +                if not retval.type.name.startswith('__bson_document_'):
                         raise AttributeError(
                             f'Element of type {retval.type.name} cannot be accessed as a document (looking for element "{part}")'
                         )
    @@ -1349,12 +1352,12 @@ def __rmatmul__(self, lhs: Any) -> SBValue | str:
                     want_child_name = f"['{part}']"
                 else:
                     # Access via indexing ``p[42]``, requires an array
    -                if not retval.type.name.startswith("__bson_array_"):
    +                if not retval.type.name.startswith('__bson_array_'):
                         raise AttributeError(
    -                        f"Element of type {retval.type.name} cannot be accessed as an array (looking for element {part})"
    +                        f'Element of type {retval.type.name} cannot be accessed as an array (looking for element {part})'
                         )
                     # Array keys are bracketed, but not quoted
    -                want_child_name = f"[{part}]"
    +                want_child_name = f'[{part}]'
                 # Find all children that match the key (usually only one)
                 matching = (c for c in retval.children if c.name == want_child_name)
                 # Get it:
    @@ -1364,7 +1367,7 @@ def __rmatmul__(self, lhs: Any) -> SBValue | str:
                     if isinstance(part, str):
                         raise KeyError(f'Document has no element "{part}"')
                     else:
    -                    raise IndexError(f"Array index [{part}] is out-of-bounds")
    +                    raise IndexError(f'Array index [{part}] is out-of-bounds')
                 # Set this as our current node, which we may step in further, or
                 # we may be done
                 retval = got
    @@ -1394,7 +1397,7 @@ class _MemoryCache:
     
         def __init__(self):
             self._segments: dict[int, bytes] = {}
    -        "Segments of memory keyed by the base address of the read operation"
    +        'Segments of memory keyed by the base address of the read operation'
     
         def get_cached(self, addr: int) -> bytes:
             """
    @@ -1410,9 +1413,9 @@ def get_cached(self, addr: int) -> bytes:
             segment = self.segment_containing(addr)
             if not segment:
                 # Memory does not exist?
    -            print(f"lldb_bson: Note: Attempted read of uncached address 0x{addr:x}")
    -            print("".join(traceback.format_stack()))
    -            return b"\0" * 512
    +            print(f'lldb_bson: Note: Attempted read of uncached address 0x{addr:x}')
    +            print(''.join(traceback.format_stack()))
    +            return b'\0' * 512
             base_addr, data = segment
             inner_offset = addr - base_addr
             return data[inner_offset:]
    @@ -1446,4 +1449,4 @@ def read_at(self, proc: SBProcess, addr: int, size: int) -> tuple[int, bytes]:
     
     
     memcache = _MemoryCache()
    -"A module-wide memory segment cache."
    +'A module-wide memory segment cache.'
    diff --git a/pyproject.toml b/pyproject.toml
    index 72f42c753d8..d9392ebbc73 100644
    --- a/pyproject.toml
    +++ b/pyproject.toml
    @@ -21,6 +21,11 @@ format = [
         "clang-format~=20.1.0",
     ]
     
    +format-scripts = [
    +    "ruff>=0.13.0",
    +    "shfmt-py>=3.12.0.2",
    +]
    +
     docs = [
         # .evergreen/scripts/build-docs.sh
         "furo>=2023.5.20",
    @@ -45,3 +50,11 @@ mc-evg-generate = "config_generator.generate:main"
     packages = [
         ".evergreen/config_generator",
     ]
    +
    +[tool.ruff]
    +line-length = 120
    +src = [".evergreen", "etc"]
    +
    +[tool.ruff.format]
    +quote-style = "single"
    +indent-style = "space"
    diff --git a/src/libbson/doc/conf.py b/src/libbson/doc/conf.py
    index b0d1a391189..82832fcb64d 100644
    --- a/src/libbson/doc/conf.py
    +++ b/src/libbson/doc/conf.py
    @@ -5,37 +5,37 @@
     
     # Ensure we can import "mongoc" extension module.
     this_path = os.path.dirname(__file__)
    -sys.path.append(os.path.normpath(os.path.join(this_path, "../../../build/sphinx")))
    +sys.path.append(os.path.normpath(os.path.join(this_path, '../../../build/sphinx')))
     
    -from mongoc_common import *
    +from mongoc_common import *  # noqa: E402, F403
     
     extensions = [
    -    "mongoc",
    +    'mongoc',
     ]
     
     # General information about the project.
    -project = "libbson"
    -copyright = "2009-present, MongoDB, Inc."
    -author = "MongoDB, Inc"
    +project = 'libbson'
    +copyright = '2009-present, MongoDB, Inc.'
    +author = 'MongoDB, Inc'
     
    -version_path = os.path.join(os.path.dirname(__file__), "../../..", "VERSION_CURRENT")
    +version_path = os.path.join(os.path.dirname(__file__), '../../..', 'VERSION_CURRENT')
     version = open(version_path).read().strip()
     
    -language = "en"
    -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
    -master_doc = "index"
    +language = 'en'
    +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
    +master_doc = 'index'
     
     # -- Options for HTML output ----------------------------------------------
     
    -html_theme = "furo"
    -html_title = html_shorttitle = "libbson %s" % version
    +html_theme = 'furo'
    +html_title = html_shorttitle = 'libbson %s' % version
     # html_favicon = None
     
     html_sidebars = {}
     
     html_use_index = False
     
    -rst_prolog = rf"""
    +rst_prolog = r"""
     
     .. _mongodb_docs_cdriver: https://www.mongodb.com/docs/languages/c/c-driver/current/
     
    @@ -47,9 +47,9 @@
     def add_canonical_link(app, pagename, templatename, context, doctree):
         link = f''
     
    -    context["metatags"] = context.get("metatags", "") + link
    +    context['metatags'] = context.get('metatags', '') + link
     
     
     def setup(app):
    -    mongoc_common_setup(app)
    -    app.connect("html-page-context", add_canonical_link)
    +    mongoc_common_setup(app)  # noqa: F405
    +    app.connect('html-page-context', add_canonical_link)
    diff --git a/src/libbson/examples/compile-with-pkg-config-static.sh b/src/libbson/examples/compile-with-pkg-config-static.sh
    index dd84504415c..d7755052cc9 100755
    --- a/src/libbson/examples/compile-with-pkg-config-static.sh
    +++ b/src/libbson/examples/compile-with-pkg-config-static.sh
    @@ -1,6 +1,6 @@
     #!/usr/bin/env bash
     
    -set -o errexit  # Exit the script with error if any of the commands fail
    +set -o errexit # Exit the script with error if any of the commands fail
     
     # -- sphinx-include-start --
     gcc -o hello_bson hello_bson.c $(pkg-config --libs --cflags bson$major-static)
    diff --git a/src/libbson/examples/compile-with-pkg-config.sh b/src/libbson/examples/compile-with-pkg-config.sh
    index 9e4ed5239cc..d7de0256e32 100755
    --- a/src/libbson/examples/compile-with-pkg-config.sh
    +++ b/src/libbson/examples/compile-with-pkg-config.sh
    @@ -1,6 +1,6 @@
     #!/usr/bin/env bash
     
    -set -o errexit  # Exit the script with error if any of the commands fail
    +set -o errexit # Exit the script with error if any of the commands fail
     
     # -- sphinx-include-start --
     gcc -o hello_bson hello_bson.c $(pkg-config --libs --cflags bson$major)
    diff --git a/src/libbson/fuzz/make-dicts.py b/src/libbson/fuzz/make-dicts.py
    index bb64765af86..2d07a12f782 100644
    --- a/src/libbson/fuzz/make-dicts.py
    +++ b/src/libbson/fuzz/make-dicts.py
    @@ -8,60 +8,60 @@
     def generate():
         simple_oid = OID((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
         ITEMS: list[LineItem] = [
    -        Comment("This file is GENERATED! DO NOT MODIFY!"),
    -        Comment("Instead, modify the content of make-dicts.py"),
    +        Comment('This file is GENERATED! DO NOT MODIFY!'),
    +        Comment('Instead, modify the content of make-dicts.py'),
             Line(),
    -        Comment("Random values"),
    -        Entry("int32_1729", encode_value(1729)),
    -        Entry("int64_1729", struct.pack(" 0;"))),
    -        Entry("symbol_elem", element("Sym", Symbol("symbol"))),
    -        Entry("oid_elem", element("OID", simple_oid)),
    -        Entry("dbpointer_elem", element("dbp", DBPointer(String("db"), simple_oid))),
    +        Entry('code_elem', element('Js', Code('() => 0;'))),
    +        Entry('symbol_elem', element('Sym', Symbol('symbol'))),
    +        Entry('oid_elem', element('OID', simple_oid)),
    +        Entry('dbpointer_elem', element('dbp', DBPointer(String('db'), simple_oid))),
             Line(),
    -        Comment("Embedded nul"),
    -        Comment("This string contains an embedded null, which is abnormal but valid"),
    -        Entry("string_with_null", element("S0", "string\0value")),
    -        Comment("This regex has an embedded null, which is invalid"),
    -        Entry("bad_regex_elem", element("RxB", Regex("f\0oo", "ig"))),
    +        Comment('Embedded nul'),
    +        Comment('This string contains an embedded null, which is abnormal but valid'),
    +        Entry('string_with_null', element('S0', 'string\0value')),
    +        Comment('This regex has an embedded null, which is invalid'),
    +        Entry('bad_regex_elem', element('RxB', Regex('f\0oo', 'ig'))),
             Comment("This element's key contains an embedded null, which is invalid"),
    -        Entry("bad_key_elem", element("foo\0bar", "string")),
    +        Entry('bad_key_elem', element('foo\0bar', 'string')),
             Line(),
    -        Comment("Objects"),
    -        Entry("obj_with_string", wrap_obj(element("single-elem", "foo"))),
    -        Entry("obj_with_null", wrap_obj(element("null", None))),
    -        Entry("obj_missing_term", wrap_obj(b"")[:-1]),
    +        Comment('Objects'),
    +        Entry('obj_with_string', wrap_obj(element('single-elem', 'foo'))),
    +        Entry('obj_with_null', wrap_obj(element('null', None))),
    +        Entry('obj_missing_term', wrap_obj(b'')[:-1]),
         ]
     
         for it in ITEMS:
             emit(it)
     
     
    -BytesIter = bytes | Iterable["BytesIter"]
    +BytesIter = bytes | Iterable['BytesIter']
     
     
     def flatten(b: BytesIter) -> bytes:
         if isinstance(b, bytes):
             return b
         else:
    -        return b"".join(map(flatten, b))
    +        return b''.join(map(flatten, b))
     
     
     def len_prefix(b: BytesIter) -> bytes:
    @@ -73,7 +73,7 @@ def len_prefix(b: BytesIter) -> bytes:
     
     def make_cstring(s: str) -> bytes:
         """Encode a UTF-8 string and append a null terminator"""
    -    return s.encode("utf-8") + b"\0"
    +    return s.encode('utf-8') + b'\0'
     
     
     def make_string(s: str) -> bytes:
    @@ -85,12 +85,12 @@ def wrap_obj(items: BytesIter) -> bytes:
         """Wrap a sequence of bytes as if a BSON object (adds a header and trailing nul)"""
         bs = flatten(items)
         header = len(bs) + 5
    -    return encode_value(header) + bs + b"\0"
    +    return encode_value(header) + bs + b'\0'
     
     
     class UndefinedType:
         def __bytes__(self) -> bytes:
    -        return b""
    +        return b''
     
     
     class Binary(NamedTuple):
    @@ -187,26 +187,28 @@ def __bytes__(self) -> bytes:
     def encode_value(val: ValueType) -> bytes:
         match val:
             case int(n):
    -            return struct.pack(" bytes:
     
     class Entry(NamedTuple):
         key: str
    -    "The key for the entry. Only for human readability"
    +    'The key for the entry. Only for human readability'
         value: bytes
    -    "The arbitrary bytes that make up the entry"
    +    'The arbitrary bytes that make up the entry'
     
     
     class Comment(NamedTuple):
    @@ -274,21 +276,21 @@ class Comment(NamedTuple):
     
     
     class Line(NamedTuple):
    -    txt: str = ""
    +    txt: str = ''
     
     
     LineItem = Entry | Comment | Line
     
     
     def escape(b: bytes) -> Iterable[str]:
    -    s = b.decode("ascii", "backslashreplace")
    +    s = b.decode('ascii', 'backslashreplace')
         for u8 in b:
             s = chr(u8)  # 0 <= u8 and u8 <= 255
             if s.isascii() and s.isprintable():
                 yield s
                 continue
             # Byte is not valid ASCII, or is not a printable char
    -        yield f"\\x{u8:0>2x}"
    +        yield f'\\x{u8:0>2x}'
     
     
     def emit(item: LineItem):
    @@ -296,12 +298,12 @@ def emit(item: LineItem):
             case Line(t):
                 print(t)
             case Comment(txt):
    -            print(f"# {txt}")
    +            print(f'# {txt}')
             case Entry(key, val):
    -            s = "".join(escape(val))
    -            s = s.replace('"', r"\x22")
    +            s = ''.join(escape(val))
    +            s = s.replace('"', r'\x22')
                 print(f'{key}="{s}"')
     
     
    -if __name__ == "__main__":
    +if __name__ == '__main__':
         generate()
    diff --git a/src/libbson/tests/validate-tests.py b/src/libbson/tests/validate-tests.py
    index 6cac02d655d..b7fa3c5fba1 100644
    --- a/src/libbson/tests/validate-tests.py
    +++ b/src/libbson/tests/validate-tests.py
    @@ -59,22 +59,22 @@ def flatten_bytes(data: _ByteIter) -> bytes:
             case bytes(data):
                 return data
             case it:
    -            return b"".join(map(flatten_bytes, it))
    +            return b''.join(map(flatten_bytes, it))
     
     
     def i32le(i: int) -> bytes:
         """Encode an integer as a 32-bit little-endian integer"""
    -    return struct.pack(" bytes:
         """Encode an integer as a 64-bit little-endian integer"""
    -    return struct.pack(" bytes:
         """Encode a float as a 64-bit little-endian float"""
    -    return struct.pack(" bytes:
    @@ -82,7 +82,7 @@ def doc(*data: _ByteIter) -> bytes:
         flat = flatten_bytes(data)
         # +5 for the null terminator and the header bytes
         hdr = i32le(len(flat) + 5)
    -    return hdr + flat + b"\0"
    +    return hdr + flat + b'\0'
     
     
     def code_with_scope(code: str, doc: _ByteIter) -> bytes:
    @@ -114,10 +114,10 @@ def cstring(s: str | _ByteIter) -> bytes:
         """Encode a string as UTF-8 and add a null terminator"""
         match s:
             case str(s):
    -            return cstring(s.encode("utf-8"))
    +            return cstring(s.encode('utf-8'))
             case bs:
                 bs = flatten_bytes(bs)
    -            return bs + b"\0"
    +            return bs + b'\0'
     
     
     def string(s: str | _ByteIter) -> bytes:
    @@ -159,15 +159,15 @@ class TestCase:
         """The bytes that will be injested by `bson_init_static` to form the document to be validated"""
         description: str | None
         """A plaintext description of the test case and what it actually does. Rendered as a comment."""
    -    flags: str = "0"
    +    flags: str = '0'
         """Spelling of the flags argument passed to the validation API"""
    -    error: ErrorInfo = ErrorInfo("0", "", 0)
    +    error: ErrorInfo = ErrorInfo('0', '', 0)
         """Expected error, if any"""
     
         @property
         def fn_name(self) -> str:
             """Get a C identifier function name for this test case"""
    -        return "_test_case_" + re.sub(r"[^\w]", "_", self.name).lower()
    +        return '_test_case_' + re.sub(r'[^\w]', '_', self.name).lower()
     
     
     def fmt_byte(n: int) -> str:
    @@ -177,16 +177,16 @@ def fmt_byte(n: int) -> str:
         """
         match n:
             case 0:
    -            return "0"
    -        case a if re.match(r"[a-zA-Z0-9.$-]", chr(a)):
    +            return '0'
    +        case a if re.match(r'[a-zA-Z0-9.$-]', chr(a)):
                 return f"'{chr(a)}'"
             case a if a < 10:
                 return str(a)
             case n:
    -            return f"0x{n:0>2x}"
    +            return f'0x{n:0>2x}'
     
     
    -GENERATED_NOTE = "// ! This code is GENERATED! Do not edit it directly!"
    +GENERATED_NOTE = '// ! This code is GENERATED! Do not edit it directly!'
     
     HEADER = rf"""{GENERATED_NOTE}
     // clang-format off
    @@ -204,46 +204,46 @@ def generate(case: TestCase) -> Iterable[str]:
         Generate the lines of a test case function.
         """
         # A comment header
    -    yield f"{GENERATED_NOTE}\n"
    -    yield f"// Case: {case.name}\n"
    +    yield f'{GENERATED_NOTE}\n'
    +    yield f'// Case: {case.name}\n'
         # The function head
    -    yield f"static inline void {case.fn_name}(void) {{\n"
    +    yield f'static inline void {case.fn_name}(void) {{\n'
         # If we have a description, emit that in a block comment
         if case.description:
    -        yield "  /**\n"
    +        yield '  /**\n'
             lines = textwrap.dedent(case.description).strip().splitlines()
    -        yield from (f"   * {ln}\n" for ln in lines)
    -        yield "   */\n"
    +        yield from (f'   * {ln}\n' for ln in lines)
    +        yield '   */\n'
         # Emit the byte array literal
    -    yield "  const uint8_t bytes[] = {\n"
    -    yield "\n".join(
    +    yield '  const uint8_t bytes[] = {\n'
    +    yield '\n'.join(
             textwrap.wrap(
    -            ", ".join(map(fmt_byte, case.data)),
    -            subsequent_indent=" " * 4,
    -            initial_indent=" " * 4,
    +            ', '.join(map(fmt_byte, case.data)),
    +            subsequent_indent=' ' * 4,
    +            initial_indent=' ' * 4,
                 width=80,
             )
         )
    -    yield "\n  };\n"
    +    yield '\n  };\n'
         yield from [
             # Initialize a BSON doc that points to the byte array
    -        "  bson_t doc;\n",
    -        "  mlib_check(bson_init_static(&doc, bytes, sizeof bytes));\n",
    +        '  bson_t doc;\n',
    +        '  mlib_check(bson_init_static(&doc, bytes, sizeof bytes));\n',
             # The error object to be filled
    -        "  bson_error_t error = {0};\n",
    +        '  bson_error_t error = {0};\n',
             # The error offset. Expected to be reset to zero on success.
    -        "  size_t offset = 999999;\n"
    +        '  size_t offset = 999999;\n'
             # Do the actual validation:
    -        f"  const bool is_valid = bson_validate_with_error_and_offset(&doc, {case.flags}, &offset, &error);\n",
    +        f'  const bool is_valid = bson_validate_with_error_and_offset(&doc, {case.flags}, &offset, &error);\n',
         ]
    -    is_error = case.error.code != "0"
    +    is_error = case.error.code != '0'
         yield from [
    -        "  mlib_check(!is_valid);\n" if is_error else "  ASSERT_OR_PRINT(is_valid, error);\n",
    -        f"  mlib_check(error.code, eq, {case.error.code});\n",
    -        f"  mlib_check(error.message, str_eq, {json.dumps(case.error.message)});\n",
    -        f"  mlib_check(offset, eq, {case.error.offset});\n" if is_error else "",
    +        '  mlib_check(!is_valid);\n' if is_error else '  ASSERT_OR_PRINT(is_valid, error);\n',
    +        f'  mlib_check(error.code, eq, {case.error.code});\n',
    +        f'  mlib_check(error.message, str_eq, {json.dumps(case.error.message)});\n',
    +        f'  mlib_check(offset, eq, {case.error.offset});\n' if is_error else '',
         ]
    -    yield "}\n"
    +    yield '}\n'
     
     
     def corruption_at(off: int) -> ErrorInfo:
    @@ -254,20 +254,20 @@ def corruption_at(off: int) -> ErrorInfo:
         Note that this won't match if the error message is something other
         than "corrupt BSON".
         """
    -    return ErrorInfo(BSON_VALIDATE_CORRUPT, "corrupt BSON", off)
    +    return ErrorInfo(BSON_VALIDATE_CORRUPT, 'corrupt BSON', off)
     
     
    -BSON_VALIDATE_CORRUPT = "BSON_VALIDATE_CORRUPT"
    -BSON_VALIDATE_DOLLAR_KEYS = "BSON_VALIDATE_DOLLAR_KEYS"
    -BSON_VALIDATE_DOT_KEYS = "BSON_VALIDATE_DOT_KEYS"
    -BSON_VALIDATE_EMPTY_KEYS = "BSON_VALIDATE_EMPTY_KEYS"
    -BSON_VALIDATE_UTF8 = "BSON_VALIDATE_UTF8"
    -BSON_VALIDATE_UTF8_ALLOW_NULL = "BSON_VALIDATE_UTF8_ALLOW_NULL"
    -MSG_EXPECTED_ID_FOLLOWING_REF = "Expected an $id element following $ref"
    +BSON_VALIDATE_CORRUPT = 'BSON_VALIDATE_CORRUPT'
    +BSON_VALIDATE_DOLLAR_KEYS = 'BSON_VALIDATE_DOLLAR_KEYS'
    +BSON_VALIDATE_DOT_KEYS = 'BSON_VALIDATE_DOT_KEYS'
    +BSON_VALIDATE_EMPTY_KEYS = 'BSON_VALIDATE_EMPTY_KEYS'
    +BSON_VALIDATE_UTF8 = 'BSON_VALIDATE_UTF8'
    +BSON_VALIDATE_UTF8_ALLOW_NULL = 'BSON_VALIDATE_UTF8_ALLOW_NULL'
    +MSG_EXPECTED_ID_FOLLOWING_REF = 'Expected an $id element following $ref'
     
     
     def disallowed_key(char: str, k: str) -> str:
    -    return f"Disallowed '{char}' in element key: \"{k}\""
    +    return f'Disallowed \'{char}\' in element key: "{k}"'
     
     
     # d888888b d88888b .d8888. d888888b       .o88b.  .d8b.  .d8888. d88888b .d8888.
    @@ -279,28 +279,28 @@ def disallowed_key(char: str, k: str) -> str:
     
     CASES: list[TestCase] = [
         TestCase(
    -        "empty",
    +        'empty',
             doc(),
             """Test a simple empty document object.""",
         ),
         TestCase(
    -        "bad-element",
    -        doc(b"f"),
    -        "The element content is not valid",
    +        'bad-element',
    +        doc(b'f'),
    +        'The element content is not valid',
             error=corruption_at(6),
         ),
         TestCase(
    -        "invalid-type",
    -        doc(elem("foo", 0xE, b"foo")),
    +        'invalid-type',
    +        doc(elem('foo', 0xE, b'foo')),
             """The type tag "0x0e" is not a valid type""",
             error=corruption_at(9),
         ),
         TestCase(
    -        "key/invalid/accept",
    +        'key/invalid/accept',
             doc(
    -            utf8elem("a", "b"),
    -            utf8elem(b"foo\xffbar", "baz"),
    -            utf8elem("c", "d"),
    +            utf8elem('a', 'b'),
    +            utf8elem(b'foo\xffbar', 'baz'),
    +            utf8elem('c', 'd'),
             ),
             """
             The element key contains an invalid UTF-8 byte, but we accept it
    @@ -308,49 +308,49 @@ def disallowed_key(char: str, k: str) -> str:
             """,
         ),
         TestCase(
    -        "key/invalid/reject",
    +        'key/invalid/reject',
             doc(
    -            utf8elem("a", "b"),
    -            elem(b"foo\xffbar", Tag.UTF8, string("baz")),
    -            utf8elem("c", "d"),
    +            utf8elem('a', 'b'),
    +            elem(b'foo\xffbar', Tag.UTF8, string('baz')),
    +            utf8elem('c', 'd'),
             ),
             """
             The element key is not valid UTF-8 and we reject it when we do UTF-8
             validation.
             """,
             flags=BSON_VALIDATE_UTF8,
    -        error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 13),
    +        error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 13),
         ),
         TestCase(
    -        "key/empty/accept",
    -        doc(utf8elem("", "string")),
    +        'key/empty/accept',
    +        doc(utf8elem('', 'string')),
             """
             The element has an empty string key, and we accept this.
             """,
         ),
         TestCase(
    -        "key/empty/reject",
    +        'key/empty/reject',
             doc(
    -            utf8elem("a", "b"),
    -            utf8elem("", "string"),
    +            utf8elem('a', 'b'),
    +            utf8elem('', 'string'),
             ),
             """
             The element has an empty key, and we can reject it.
             """,
             flags=BSON_VALIDATE_EMPTY_KEYS,
    -        error=ErrorInfo(BSON_VALIDATE_EMPTY_KEYS, "Element key cannot be an empty string", 13),
    +        error=ErrorInfo(BSON_VALIDATE_EMPTY_KEYS, 'Element key cannot be an empty string', 13),
         ),
         TestCase(
    -        "key/empty/accept-if-absent",
    -        doc(utf8elem("foo", "bar")),
    +        'key/empty/accept-if-absent',
    +        doc(utf8elem('foo', 'bar')),
             """
             We are checking for empty keys, and accept if they are absent.
             """,
             flags=BSON_VALIDATE_EMPTY_KEYS,
         ),
         TestCase(
    -        "key/dot/accept",
    -        doc(utf8elem("foo.bar", "baz")),
    +        'key/dot/accept',
    +        doc(utf8elem('foo.bar', 'baz')),
             """
             The element key has an ASCII dot, and we accept this since we don't
             ask to validate it.
    @@ -358,42 +358,42 @@ def disallowed_key(char: str, k: str) -> str:
             flags=BSON_VALIDATE_EMPTY_KEYS,
         ),
         TestCase(
    -        "key/dot/reject",
    -        doc(utf8elem("a", "b"), utf8elem("foo.bar", "baz")),
    +        'key/dot/reject',
    +        doc(utf8elem('a', 'b'), utf8elem('foo.bar', 'baz')),
             """
             The element has an ASCII dot, and we reject it when we ask to validate
             it.
             """,
             flags=BSON_VALIDATE_DOT_KEYS,
    -        error=ErrorInfo(BSON_VALIDATE_DOT_KEYS, disallowed_key(".", "foo.bar"), 13),
    +        error=ErrorInfo(BSON_VALIDATE_DOT_KEYS, disallowed_key('.', 'foo.bar'), 13),
         ),
         TestCase(
    -        "key/dot/accept-if-absent",
    -        doc(utf8elem("foo", "bar")),
    +        'key/dot/accept-if-absent',
    +        doc(utf8elem('foo', 'bar')),
             """
             We are checking for keys with dot '.', and accept if they are absent.
             """,
             flags=BSON_VALIDATE_DOT_KEYS,
         ),
         TestCase(
    -        "key/dollar/accept",
    -        doc(utf8elem("a", "b"), utf8elem("$foo", "bar")),
    +        'key/dollar/accept',
    +        doc(utf8elem('a', 'b'), utf8elem('$foo', 'bar')),
             """
             We can accept an element key that starts with a dollar '$' sign.
             """,
         ),
         TestCase(
    -        "key/dollar/reject",
    -        doc(utf8elem("a", "b"), utf8elem("$foo", "bar")),
    +        'key/dollar/reject',
    +        doc(utf8elem('a', 'b'), utf8elem('$foo', 'bar')),
             """
             We can reject an element key that starts with a dollar '$' sign.
             """,
             flags=BSON_VALIDATE_DOLLAR_KEYS,
    -        error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key("$", "$foo"), 13),
    +        error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key('$', '$foo'), 13),
         ),
         TestCase(
    -        "key/dollar/accept-in-middle",
    -        doc(utf8elem("foo$bar", "baz")),
    +        'key/dollar/accept-in-middle',
    +        doc(utf8elem('foo$bar', 'baz')),
             """
         This contains an element key "foo$bar", but we don't reject this, as we
             only care about keys that *start* with dollars.
    @@ -401,8 +401,8 @@ def disallowed_key(char: str, k: str) -> str:
             flags=BSON_VALIDATE_DOLLAR_KEYS,
         ),
         TestCase(
    -        "key/dollar/accept-if-absent",
    -        doc(utf8elem("foo", "bar")),
    +        'key/dollar/accept-if-absent',
    +        doc(utf8elem('foo', 'bar')),
             """
             We are validating for dollar-keys, and we accept because this document
             doesn't contain any such keys.
    @@ -410,13 +410,13 @@ def disallowed_key(char: str, k: str) -> str:
             flags=BSON_VALIDATE_DOLLAR_KEYS,
         ),
         TestCase(
    -        "utf8/simple",
    -        doc(utf8elem("string", "some string")),
    -        "Simple UTF-8 string element",
    +        'utf8/simple',
    +        doc(utf8elem('string', 'some string')),
    +        'Simple UTF-8 string element',
         ),
         TestCase(
    -        "utf8/missing-null",
    -        doc(elem("a", Tag.UTF8, i32le(4), b"abcd")),
    +        'utf8/missing-null',
    +        doc(elem('a', Tag.UTF8, i32le(4), b'abcd')),
             """
             The UTF-8 element "a" contains 4 characters and declares its length of 4,
             but the fourth character is supposed to be a null terminator. In this case,
    @@ -425,87 +425,87 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(14),
         ),
         TestCase(
    -        "utf8/length-zero",
    -        doc(elem("", Tag.UTF8, i32le(0), b"\0")),
    -        "UTF-8 string length must always be at least 1 for the null terminator",
    +        'utf8/length-zero',
    +        doc(elem('', Tag.UTF8, i32le(0), b'\0')),
    +        'UTF-8 string length must always be at least 1 for the null terminator',
             error=corruption_at(6),
         ),
         TestCase(
    -        "utf8/length-too-short",
    -        doc(elem("", Tag.UTF8, i32le(3), b"bar\0")),
    -        "UTF-8 string is three chars and a null terminator, but the declared length is 3 (should be 4)",
    +        'utf8/length-too-short',
    +        doc(elem('', Tag.UTF8, i32le(3), b'bar\0')),
    +        'UTF-8 string is three chars and a null terminator, but the declared length is 3 (should be 4)',
             error=corruption_at(12),
         ),
         TestCase(
    -        "utf8/header-too-large",
    -        doc(elem("foo", Tag.UTF8, b"\xff\xff\xff\xffbar\0")),
    +        'utf8/header-too-large',
    +        doc(elem('foo', Tag.UTF8, b'\xff\xff\xff\xffbar\0')),
             """
             Data { "foo": "bar" } but the declared length of "bar" is way too large.
             """,
             error=corruption_at(9),
         ),
         TestCase(
    -        "utf8/valid",
    -        doc(elem("foo", Tag.UTF8, string("abcd"))),
    +        'utf8/valid',
    +        doc(elem('foo', Tag.UTF8, string('abcd'))),
             """
             Validate a valid UTF-8 string with UTF-8 validation enabled.
             """,
             flags=BSON_VALIDATE_UTF8,
         ),
         TestCase(
    -        "utf8/invalid/accept",
    -        doc(utf8elem("foo", b"abc\xffd")),
    +        'utf8/invalid/accept',
    +        doc(utf8elem('foo', b'abc\xffd')),
             """
             Validate an invalid UTF-8 string, but accept invalid UTF-8.
             """,
         ),
         TestCase(
    -        "utf8/invalid/reject",
    -        doc(utf8elem("foo", b"abc\xffd")),
    +        'utf8/invalid/reject',
    +        doc(utf8elem('foo', b'abc\xffd')),
             """
             Validate an invalid UTF-8 string, and expect rejection.
             """,
             flags=BSON_VALIDATE_UTF8,
    -        error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 4),
    +        error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 4),
         ),
         TestCase(
    -        "utf8/valid-with-null/accept-1",
    -        doc(utf8elem("foo", b"abc\x00123")),
    +        'utf8/valid-with-null/accept-1',
    +        doc(utf8elem('foo', b'abc\x00123')),
             """
             This is a valid UTF-8 string that contains a null character. We accept
             it because we don't do UTF-8 validation.
             """,
         ),
         TestCase(
    -        "utf8/valid-with-null/accept-2",
    -        doc(utf8elem("foo", b"abc\x00123")),
    +        'utf8/valid-with-null/accept-2',
    +        doc(utf8elem('foo', b'abc\x00123')),
             """
             This is a valid UTF-8 string that contains a null character. We allow
             it explicitly when we request UTF-8 validation.
             """,
    -        flags=f"{BSON_VALIDATE_UTF8} | {BSON_VALIDATE_UTF8_ALLOW_NULL}",
    +        flags=f'{BSON_VALIDATE_UTF8} | {BSON_VALIDATE_UTF8_ALLOW_NULL}',
         ),
         TestCase(
    -        "utf8/valid-with-null/reject",
    -        doc(utf8elem("foo", b"abc\x00123")),
    +        'utf8/valid-with-null/reject',
    +        doc(utf8elem('foo', b'abc\x00123')),
             """
             This is a valid UTF-8 string that contains a null character. We reject
             this because we don't pass BSON_VALIDATE_UTF8_ALLOW_NULL.
             """,
             flags=BSON_VALIDATE_UTF8,
    -        error=ErrorInfo(BSON_VALIDATE_UTF8_ALLOW_NULL, "UTF-8 string contains a U+0000 (null) character", 4),
    +        error=ErrorInfo(BSON_VALIDATE_UTF8_ALLOW_NULL, 'UTF-8 string contains a U+0000 (null) character', 4),
         ),
         TestCase(
    -        "utf8/overlong-null/accept-1",
    -        doc(utf8elem("foo", b"abc\xc0\x80123")),
    +        'utf8/overlong-null/accept-1',
    +        doc(utf8elem('foo', b'abc\xc0\x80123')),
             """
             This is an *invalid* UTF-8 string, and contains an overlong null. We should
             accept it because we aren't doing UTF-8 validation.
             """,
         ),
         TestCase(
    -        "utf8/overlong-null/accept-2",
    -        doc(utf8elem("foo", b"abc\xc0\x80123")),
    +        'utf8/overlong-null/accept-2',
    +        doc(utf8elem('foo', b'abc\xc0\x80123')),
             """
             ! NOTE: overlong-null: This test relies on our UTF-8 validation accepting the `c0 80` sequence
     
    @@ -517,11 +517,11 @@ def disallowed_key(char: str, k: str) -> str:
             If/when UTF-8 validation is changed to reject overlong null, then this
             test should change to expect rejection the invalid UTF-8.
             """,
    -        flags=f"{BSON_VALIDATE_UTF8} | {BSON_VALIDATE_UTF8_ALLOW_NULL}",
    +        flags=f'{BSON_VALIDATE_UTF8} | {BSON_VALIDATE_UTF8_ALLOW_NULL}',
         ),
         TestCase(
    -        "utf8/overlong-null/reject",
    -        doc(utf8elem("foo", b"abc\xc0\x80123")),
    +        'utf8/overlong-null/reject',
    +        doc(utf8elem('foo', b'abc\xc0\x80123')),
             """
             ! NOTE: overlong-null: This test relies on our UTF-8 validation accepting the `c0 80` sequence
     
    @@ -534,29 +534,29 @@ def disallowed_key(char: str, k: str) -> str:
             expected error code and error message for this test should change.
             """,
             flags=BSON_VALIDATE_UTF8,
    -        error=ErrorInfo(BSON_VALIDATE_UTF8_ALLOW_NULL, "UTF-8 string contains a U+0000 (null) character", 4),
    +        error=ErrorInfo(BSON_VALIDATE_UTF8_ALLOW_NULL, 'UTF-8 string contains a U+0000 (null) character', 4),
         ),
         TestCase(
    -        "utf8-key/invalid/accept",
    -        doc(utf8elem(b"abc\xffdef", "bar")),
    +        'utf8-key/invalid/accept',
    +        doc(utf8elem(b'abc\xffdef', 'bar')),
             """
         The element key is not valid UTF-8, but we accept it if we don't do
             UTF-8 validation.
             """,
         ),
         TestCase(
    -        "utf8-key/invalid/reject",
    -        doc(utf8elem(b"abc\xffdef", "bar")),
    +        'utf8-key/invalid/reject',
    +        doc(utf8elem(b'abc\xffdef', 'bar')),
             """
             The element key is not valid UTF-8, and we reject it when we requested
             UTF-8 validation.
             """,
             flags=BSON_VALIDATE_UTF8,
    -        error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 4),
    +        error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 4),
         ),
         TestCase(
    -        "utf8-key/overlong-null/reject",
    -        doc(utf8elem(b"abc\xc0\x80def", "bar")),
    +        'utf8-key/overlong-null/reject',
    +        doc(utf8elem(b'abc\xc0\x80def', 'bar')),
             """
             ! NOTE: overlong-null: This test relies on our UTF-8 validation accepting the `c0 80` sequence
     
    @@ -568,11 +568,11 @@ def disallowed_key(char: str, k: str) -> str:
             expected error code and error message for this test should change.
             """,
             flags=BSON_VALIDATE_UTF8,
    -        error=ErrorInfo(BSON_VALIDATE_UTF8_ALLOW_NULL, "UTF-8 string contains a U+0000 (null) character", 4),
    +        error=ErrorInfo(BSON_VALIDATE_UTF8_ALLOW_NULL, 'UTF-8 string contains a U+0000 (null) character', 4),
         ),
         TestCase(
    -        "utf8-key/overlong-null/accept",
    -        doc(utf8elem(b"abc\xc0\x80def", "bar")),
    +        'utf8-key/overlong-null/accept',
    +        doc(utf8elem(b'abc\xc0\x80def', 'bar')),
             """
             ! NOTE: overlong-null: This test relies on our UTF-8 validation accepting the `c0 80` sequence
     
    @@ -583,54 +583,54 @@ def disallowed_key(char: str, k: str) -> str:
             If/when UTF-8 validation is changed to reject overlong null, then this
             test case should instead reject the key string as invalid UTF-8.
             """,
    -        flags=f"{BSON_VALIDATE_UTF8} | {BSON_VALIDATE_UTF8_ALLOW_NULL}",
    +        flags=f'{BSON_VALIDATE_UTF8} | {BSON_VALIDATE_UTF8_ALLOW_NULL}',
         ),
         TestCase(
    -        "array/empty",
    -        doc(elem("array", Tag.Array, doc())),
    -        "Simple empty array element",
    +        'array/empty',
    +        doc(elem('array', Tag.Array, doc())),
    +        'Simple empty array element',
         ),
         TestCase(
    -        "array/simple",
    +        'array/simple',
             doc(
                 elem(
    -                "array",
    +                'array',
                     Tag.Array,
                     doc(
    -                    elem("0", Tag.Int32, i32le(42)),
    -                    elem("1", Tag.Int32, i32le(1729)),
    -                    elem("2", Tag.Int32, i32le(-8)),
    +                    elem('0', Tag.Int32, i32le(42)),
    +                    elem('1', Tag.Int32, i32le(1729)),
    +                    elem('2', Tag.Int32, i32le(-8)),
                     ),
                 )
             ),
    -        "Simple array element of integers",
    +        'Simple array element of integers',
         ),
         TestCase(
    -        "array/invalid-element",
    +        'array/invalid-element',
             doc(
                 elem(
    -                "array",
    +                'array',
                     Tag.Array,
                     doc(
    -                    elem("0", Tag.Int32, i32le(42)),
    -                    elem("1", Tag.Int32, i32le(1729)[-1:]),  # Truncated
    -                    elem("2", Tag.Int32, i32le(-8)),
    +                    elem('0', Tag.Int32, i32le(42)),
    +                    elem('1', Tag.Int32, i32le(1729)[-1:]),  # Truncated
    +                    elem('2', Tag.Int32, i32le(-8)),
                     ),
                 )
             ),
    -        "Simple array element of integers, but one element is truncated",
    +        'Simple array element of integers, but one element is truncated',
             error=corruption_at(34),
         ),
         TestCase(
    -        "array/invalid-element-check-offset",
    +        'array/invalid-element-check-offset',
             doc(
                 elem(
    -                "array-shifted",
    +                'array-shifted',
                     Tag.Array,
                     doc(
    -                    elem("0", Tag.Int32, i32le(42)),
    -                    elem("1", Tag.Int32, i32le(1729)[-1:]),  # Truncated
    -                    elem("2", Tag.Int32, i32le(-8)),
    +                    elem('0', Tag.Int32, i32le(42)),
    +                    elem('1', Tag.Int32, i32le(1729)[-1:]),  # Truncated
    +                    elem('2', Tag.Int32, i32le(-8)),
                     ),
                 )
             ),
    @@ -642,37 +642,37 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(42),
         ),
         TestCase(
    -        "symbol/simple",
    -        doc(elem("symbol", Tag.Symbol, string("void 0;"))),
    +        'symbol/simple',
    +        doc(elem('symbol', Tag.Symbol, string('void 0;'))),
             """
             A simple document: { symbol: Symbol("void 0;") }
             """,
         ),
         TestCase(
    -        "symbol/invalid-utf8/accept",
    -        doc(elem("symbol", Tag.Symbol, string(b"void\xff 0;"))),
    +        'symbol/invalid-utf8/accept',
    +        doc(elem('symbol', Tag.Symbol, string(b'void\xff 0;'))),
             """
             A simple symbol document, but the string contains invalid UTF-8
             """,
         ),
         TestCase(
    -        "symbol/invalid-utf8/reject",
    -        doc(elem("symbol", Tag.Symbol, string(b"void\xff 0;"))),
    +        'symbol/invalid-utf8/reject',
    +        doc(elem('symbol', Tag.Symbol, string(b'void\xff 0;'))),
             """
             A simple symbol document, but the string contains invalid UTF-8
             """,
             flags=BSON_VALIDATE_UTF8,
    -        error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 4),
    +        error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 4),
         ),
         TestCase(
    -        "symbol/length-zero",
    -        doc(b"\x0e\0" + i32le(0) + b"\0"),
    -        "Symbol string length must always be at least 1 for the null terminator",
    +        'symbol/length-zero',
    +        doc(b'\x0e\0' + i32le(0) + b'\0'),
    +        'Symbol string length must always be at least 1 for the null terminator',
             error=corruption_at(6),
         ),
         TestCase(
    -        "symbol/length-too-short",
    -        doc(b"\x0e\0" + i32le(3) + b"bar\0"),
    +        'symbol/length-too-short',
    +        doc(b'\x0e\0' + i32le(3) + b'bar\0'),
             """
             Symbol string is three chars and a null terminator, but the declared
             length is 3 (should be 4)
    @@ -680,55 +680,55 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(12),
         ),
         TestCase(
    -        "code/simple",
    -        doc(elem("code", Tag.Code, string("void 0;"))),
    +        'code/simple',
    +        doc(elem('code', Tag.Code, string('void 0;'))),
             """
             A simple document: { code: Code("void 0;") }
             """,
         ),
         TestCase(
    -        "code/invalid-utf8/accept",
    -        doc(elem("code", Tag.Code, string(b"void\xff 0;"))),
    +        'code/invalid-utf8/accept',
    +        doc(elem('code', Tag.Code, string(b'void\xff 0;'))),
             """
             A simple code document, but the string contains invalid UTF-8
             """,
         ),
         TestCase(
    -        "code/invalid-utf8/reject",
    -        doc(elem("code", Tag.Code, string(b"void\xff 0;"))),
    +        'code/invalid-utf8/reject',
    +        doc(elem('code', Tag.Code, string(b'void\xff 0;'))),
             """
             A simple code document, but the string contains invalid UTF-8
             """,
             flags=BSON_VALIDATE_UTF8,
    -        error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 4),
    +        error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 4),
         ),
         TestCase(
    -        "code/length-zero",
    -        doc(elem("code", Tag.Code, i32le(0), b"\0")),
    -        "Code string length must always be at least 1 for the null terminator",
    +        'code/length-zero',
    +        doc(elem('code', Tag.Code, i32le(0), b'\0')),
    +        'Code string length must always be at least 1 for the null terminator',
             error=corruption_at(10),
         ),
         TestCase(
    -        "code/length-too-short",
    -        doc(elem("code", Tag.Code, i32le(3), b"bar\0")),
    -        "Code string is three chars and a null terminator, but the declared length is 3 (should be 4)",
    +        'code/length-too-short',
    +        doc(elem('code', Tag.Code, i32le(3), b'bar\0')),
    +        'Code string is three chars and a null terminator, but the declared length is 3 (should be 4)',
             error=corruption_at(16),
         ),
         # Code w/ scope
         TestCase(
    -        "code-with-scope/simple",
    -        doc(elem("foo", Tag.CodeWithScope, code_with_scope("void 0;", doc()))),
    -        "A simple valid code-with-scope element",
    +        'code-with-scope/simple',
    +        doc(elem('foo', Tag.CodeWithScope, code_with_scope('void 0;', doc()))),
    +        'A simple valid code-with-scope element',
         ),
         TestCase(
    -        "code-with-scope/invalid-code-length-zero",
    +        'code-with-scope/invalid-code-length-zero',
             doc(
                 elem(
    -                "",
    +                '',
                     Tag.CodeWithScope,
                     i32le(10),
    -                b"\0\0\0\0",  # strlen
    -                b"\0",  # code
    +                b'\0\0\0\0',  # strlen
    +                b'\0',  # code
                     doc(),  # scope
                 )
             ),
    @@ -739,14 +739,14 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(6),
         ),
         TestCase(
    -        "code-with-scope/invalid-code-length-too-large",
    +        'code-with-scope/invalid-code-length-too-large',
             doc(
                 elem(
    -                "",
    +                '',
                     Tag.CodeWithScope,
                     i32le(10),
    -                b"\xff\xff\xff\xff",  # strlen (too big)
    -                b"\0",
    +                b'\xff\xff\xff\xff',  # strlen (too big)
    +                b'\0',
                     doc(),  # Scope
                 )
             ),
    @@ -756,21 +756,21 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(6),
         ),
         TestCase(
    -        "code-with-scope/invalid-scope",
    -        doc(elem("foo", Tag.CodeWithScope, code_with_scope("void 0;", doc()[:-1]))),
    -        "A code-with-scope element, but the scope document is corrupted",
    +        'code-with-scope/invalid-scope',
    +        doc(elem('foo', Tag.CodeWithScope, code_with_scope('void 0;', doc()[:-1]))),
    +        'A code-with-scope element, but the scope document is corrupted',
             error=corruption_at(13),
         ),
         TestCase(
    -        "code-with-scope/empty-key-in-scope",
    +        'code-with-scope/empty-key-in-scope',
             doc(
                 elem(
    -                "code",
    +                'code',
                     Tag.CodeWithScope,
                     code_with_scope(
    -                    "void 0;",
    +                    'void 0;',
                         doc(
    -                        elem("obj", Tag.Document, doc(utf8elem("", "string"))),
    +                        elem('obj', Tag.Document, doc(utf8elem('', 'string'))),
                         ),
                     ),
                 )
    @@ -784,59 +784,59 @@ def disallowed_key(char: str, k: str) -> str:
             flags=BSON_VALIDATE_EMPTY_KEYS,
         ),
         TestCase(
    -        "code-with-scope/corrupt-scope",
    +        'code-with-scope/corrupt-scope',
             doc(
                 elem(
    -                "code",
    +                'code',
                     Tag.CodeWithScope,
                     code_with_scope(
    -                    "void 0;",
    +                    'void 0;',
                         doc(
                             elem(
    -                            "foo",
    +                            'foo',
                                 Tag.UTF8,
                                 i32le(0),  # Invalid string length
    -                            b"\0",
    +                            b'\0',
                             )
                         ),
                     ),
                 )
             ),
    -        "A code-with-scope element, but the scope contains corruption",
    +        'A code-with-scope element, but the scope contains corruption',
             error=ErrorInfo(BSON_VALIDATE_CORRUPT, 'Error in scope document for element "code": corrupt BSON', offset=13),
         ),
         TestCase(
    -        "code-with-scope/corrupt-scope-2",
    +        'code-with-scope/corrupt-scope-2',
             doc(
                 elem(
    -                "code",
    +                'code',
                     Tag.CodeWithScope,
                     code_with_scope(
    -                    "void 0;",
    +                    'void 0;',
                         doc(
                             elem(
    -                            "foo",
    +                            'foo',
                                 Tag.UTF8,
    -                            b"\xff\xff\xff\xff",  # Invalid string length
    -                            b"\0",
    +                            b'\xff\xff\xff\xff',  # Invalid string length
    +                            b'\0',
                             )
                         ),
                     ),
                 )
             ),
    -        "A code-with-scope element, but the scope contains corruption",
    +        'A code-with-scope element, but the scope contains corruption',
             error=ErrorInfo(BSON_VALIDATE_CORRUPT, 'Error in scope document for element "code": corrupt BSON', offset=13),
         ),
         TestCase(
    -        "regex/simple",
    -        doc(elem("regex", Tag.Regex, b"1234\0gi\0")),
    +        'regex/simple',
    +        doc(elem('regex', Tag.Regex, b'1234\0gi\0')),
             """
             Simple document: { regex: Regex("1234", "gi") }
             """,
         ),
         TestCase(
    -        "regex/invalid-opts",
    -        doc(elem("regex", Tag.Regex, b"foo\0bar")),
    +        'regex/invalid-opts',
    +        doc(elem('regex', Tag.Regex, b'foo\0bar')),
             """
             A regular expression element with missing null terminator. The main
             option string "foo" has a null terminator, but the option component "bar"
    @@ -847,8 +847,8 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(18),
         ),
         TestCase(
    -        "regex/double-null",
    -        doc(elem("regex", Tag.Regex, b"foo\0bar\0\0")),
    +        'regex/double-null',
    +        doc(elem('regex', Tag.Regex, b'foo\0bar\0\0')),
             """
             A regular expression element with an extra null terminator. Since regex
             is delimited by its null terminator, the iterator will stop early before
    @@ -857,24 +857,24 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(21),
         ),
         TestCase(
    -        "regex/invalid-utf8/accept",
    -        doc(elem("regex", Tag.Regex, b"foo\xffbar\0gi\0")),
    +        'regex/invalid-utf8/accept',
    +        doc(elem('regex', Tag.Regex, b'foo\xffbar\0gi\0')),
             """
             A regular expression that contains invalid UTF-8.
             """,
         ),
         TestCase(
    -        "regex/invalid-utf8/reject",
    -        doc(elem("regex", Tag.Regex, b"foo\xffbar\0gi\0")),
    +        'regex/invalid-utf8/reject',
    +        doc(elem('regex', Tag.Regex, b'foo\xffbar\0gi\0')),
             """
             A regular expression that contains invalid UTF-8.
             """,
             flags=BSON_VALIDATE_UTF8,
    -        error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 4),
    +        error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 4),
         ),
         TestCase(
    -        "regex/invalid-utf8/accept-if-absent",
    -        doc(elem("regex", Tag.Regex, b"foo\0gi\0")),
    +        'regex/invalid-utf8/accept-if-absent',
    +        doc(elem('regex', Tag.Regex, b'foo\0gi\0')),
             """
         A regular valid UTF-8 regex. We check for invalid UTF-8, and accept because
             the regex is fine.
    @@ -882,14 +882,14 @@ def disallowed_key(char: str, k: str) -> str:
             flags=BSON_VALIDATE_UTF8,
         ),
         TestCase(
    -        "dbpointer/string-length-zero",
    +        'dbpointer/string-length-zero',
             doc(
                 elem(
    -                "foo",
    +                'foo',
                     Tag.DBPointer,
                     i32le(0),  # String length (invalid)
    -                b"\0",  # Empty string
    -                b"\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99",  # OID
    +                b'\0',  # Empty string
    +                b'\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99',  # OID
                 )
             ),
             """
    @@ -899,14 +899,14 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(9),
         ),
         TestCase(
    -        "dbpointer/string-length-too-big",
    +        'dbpointer/string-length-too-big',
             doc(
                 elem(
    -                "foo",
    +                'foo',
                     Tag.DBPointer,
    -                b"\xff\xff\xff\xff",  # String length  (invalid)
    -                b"foobar\0",  # Simple string
    -                b"\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99",  # OID
    +                b'\xff\xff\xff\xff',  # String length  (invalid)
    +                b'foobar\0',  # Simple string
    +                b'\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99',  # OID
                 )
             ),
             """
    @@ -916,17 +916,17 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(9),
         ),
         TestCase(
    -        "dbpointer/truncated",
    +        'dbpointer/truncated',
             doc(
    -            utf8elem("a", "b"),
    +            utf8elem('a', 'b'),
                 elem(
    -                "foo",
    +                'foo',
                     Tag.DBPointer,
                     i32le(7),  # 7 bytes, bleeding into the null terminator
    -                b"foobar",  # Simple string, missing a null terminator.
    -                b"\x00\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99",  # OID
    +                b'foobar',  # Simple string, missing a null terminator.
    +                b'\x00\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99',  # OID
                 ),
    -            utf8elem("a", "b"),
    +            utf8elem('a', 'b'),
             ),
             """
             Document { "foo": DBPointer("foobar", ) }, but the length header on
    @@ -936,14 +936,14 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(43),
         ),
         TestCase(
    -        "dbpointer/missing-null",
    +        'dbpointer/missing-null',
             doc(
                 elem(
    -                "foo",
    +                'foo',
                     Tag.DBPointer,
                     i32le(4),
    -                b"abcd",  # Missing null terminator
    -                b"\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99",  # OID
    +                b'abcd',  # Missing null terminator
    +                b'\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99',  # OID
                 )
             ),
             """
    @@ -953,13 +953,13 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(16),
         ),
         TestCase(
    -        "dbpointer/invalid-utf8/accept",
    +        'dbpointer/invalid-utf8/accept',
             doc(
                 elem(
    -                "foo",
    +                'foo',
                     Tag.DBPointer,
    -                string(b"abc\xffdef"),  # String with invalid UTF-8
    -                b"\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99",  # OID
    +                string(b'abc\xffdef'),  # String with invalid UTF-8
    +                b'\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99',  # OID
                 )
             ),
             """
    @@ -967,29 +967,29 @@ def disallowed_key(char: str, k: str) -> str:
             """,
         ),
         TestCase(
    -        "dbpointer/invalid-utf8/reject",
    +        'dbpointer/invalid-utf8/reject',
             doc(
                 elem(
    -                "foo",
    +                'foo',
                     Tag.DBPointer,
    -                string(b"abc\xffdef"),  # String with invalid UTF-8
    -                b"\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99",  # OID
    +                string(b'abc\xffdef'),  # String with invalid UTF-8
    +                b'\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99',  # OID
                 )
             ),
             """
             DBPointer document, but the collection string contains invalid UTF-8
             """,
             flags=BSON_VALIDATE_UTF8,
    -        error=ErrorInfo(BSON_VALIDATE_UTF8, "Text element is not valid UTF-8", 4),
    +        error=ErrorInfo(BSON_VALIDATE_UTF8, 'Text element is not valid UTF-8', 4),
         ),
         TestCase(
    -        "dbpointer/invalid-utf8/accept-if-absent",
    +        'dbpointer/invalid-utf8/accept-if-absent',
             doc(
                 elem(
    -                "foo",
    +                'foo',
                     Tag.DBPointer,
    -                string(b"abcdef"),  # Valid string
    -                b"\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99",  # OID
    +                string(b'abcdef'),  # Valid string
    +                b'\x52\x59\xb5\x6a\xfa\x5b\xd8\x41\xd6\x58\x5d\x99',  # OID
                 )
             ),
             """
    @@ -999,15 +999,15 @@ def disallowed_key(char: str, k: str) -> str:
             flags=BSON_VALIDATE_UTF8,
         ),
         TestCase(
    -        "subdoc/simple",
    -        doc(elem("doc", Tag.Document, doc(utf8elem("foo", "bar")))),
    +        'subdoc/simple',
    +        doc(elem('doc', Tag.Document, doc(utf8elem('foo', 'bar')))),
             """
             A simple document: { doc: { foo: "bar" } }
             """,
         ),
         TestCase(
    -        "subdoc/invalid-shared-null",
    -        doc(elem("doc", Tag.Document, doc()[:-1])),
    +        'subdoc/invalid-shared-null',
    +        doc(elem('doc', Tag.Document, doc()[:-1])),
             """
             A truncated subdocument element, with its null terminator accidentally
             overlapping the parent document's null.
    @@ -1015,8 +1015,8 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(9),
         ),
         TestCase(
    -        "subdoc/overlapping-utf8-null",
    -        doc(elem("doc", Tag.Document, doc(utf8elem("bar", "baz\0")[:-1]))),
    +        'subdoc/overlapping-utf8-null',
    +        doc(elem('doc', Tag.Document, doc(utf8elem('bar', 'baz\0')[:-1]))),
             """
             Encodes the document:
     
    @@ -1028,18 +1028,18 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(18),
         ),
         TestCase(
    -        "subdoc/invalid-element",
    -        doc(elem("doc", Tag.Document, doc(elem("dbl", Tag.Double, b"abcd")))),
    -        "A subdocument that contains an invalid element",
    +        'subdoc/invalid-element',
    +        doc(elem('doc', Tag.Document, doc(elem('dbl', Tag.Double, b'abcd')))),
    +        'A subdocument that contains an invalid element',
             error=corruption_at(18),
         ),
         TestCase(
    -        "subdoc/header-too-large",
    +        'subdoc/header-too-large',
             doc(
                 elem(
    -                "foo",
    +                'foo',
                     Tag.Document,
    -                b"\xf7\xff\xff\xff\0",  # Bad document
    +                b'\xf7\xff\xff\xff\0',  # Bad document
                 ),
             ),
             """
    @@ -1048,12 +1048,12 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(9),
         ),
         TestCase(
    -        "subdoc/header-too-small",
    +        'subdoc/header-too-small',
             doc(
                 elem(
    -                "test",
    +                'test',
                     Tag.Document,
    -                b"\x04\0\0\0",  # Only four bytes. All docs must be at least 5
    +                b'\x04\0\0\0',  # Only four bytes. All docs must be at least 5
                 ),
             ),
             """
    @@ -1062,12 +1062,12 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(4),
         ),
         TestCase(
    -        "subdoc/impossible-size",
    +        'subdoc/impossible-size',
             doc(
                 elem(
    -                "foo",
    +                'foo',
                     Tag.Document,
    -                b"\xff\xff\xff\xff\0",  # Bad document
    +                b'\xff\xff\xff\xff\0',  # Bad document
                 ),
             ),
             """
    @@ -1077,36 +1077,36 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(9),
         ),
         TestCase(
    -        "null/simple",
    -        doc(elem("null", Tag.Null)),
    +        'null/simple',
    +        doc(elem('null', Tag.Null)),
             """
             A simple document: { "null": null }
             """,
         ),
         TestCase(
    -        "undefined/simple",
    -        doc(elem("undefined", Tag.Undefined)),
    +        'undefined/simple',
    +        doc(elem('undefined', Tag.Undefined)),
             """
             A simple document: { "undefined": undefined }
             """,
         ),
         TestCase(
    -        "binary/simple",
    -        doc(elem("binary", Tag.Binary, binary(0x80, b"12345"))),
    +        'binary/simple',
    +        doc(elem('binary', Tag.Binary, binary(0x80, b'12345'))),
             """
             Simple binary data { "binary": Binary(0x80, b'12345') }
             """,
         ),
         TestCase(
    -        "binary/bad-length-zero-subtype-2",
    +        'binary/bad-length-zero-subtype-2',
             doc(
                 elem(
    -                "binary",
    +                'binary',
                     Tag.Binary,
                     i32le(0),  # Invalid: Zero length
    -                b"\x02",  # subtype two
    +                b'\x02',  # subtype two
                     i32le(4),  # Length of 4
    -                b"1234",  # payload
    +                b'1234',  # payload
                 ),
             ),
             """
    @@ -1116,15 +1116,15 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(12),
         ),
         TestCase(
    -        "binary/bad-inner-length-on-subtype-2",
    +        'binary/bad-inner-length-on-subtype-2',
             doc(
                 elem(
    -                "binary",
    +                'binary',
                     Tag.Binary,
                     i32le(8),  # Valid length
    -                b"\x02",  # subtype two
    +                b'\x02',  # subtype two
                     i32le(2),  # Invalid length of (should be 4)
    -                b"1234",  # payload
    +                b'1234',  # payload
                 ),
             ),
             """
    @@ -1134,14 +1134,14 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(17),
         ),
         TestCase(
    -        "binary/bad-length-too-small",
    +        'binary/bad-length-too-small',
             doc(
                 elem(
    -                "binary",
    +                'binary',
                     Tag.Binary,
                     i32le(2),  # Length prefix (too small)
    -                b"\x80",  # subtype
    -                b"1234",  # payload
    +                b'\x80',  # subtype
    +                b'1234',  # payload
                 ),
             ),
             """
    @@ -1155,14 +1155,14 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(22),
         ),
         TestCase(
    -        "binary/bad-length-too-big",
    +        'binary/bad-length-too-big',
             doc(
                 elem(
    -                "binary",
    +                'binary',
                     Tag.Binary,
    -                b"\xf3\xff\xff\xff",  # Length prefix (too big)
    -                b"\x80",  # subtype
    -                b"1234",  # data
    +                b'\xf3\xff\xff\xff',  # Length prefix (too big)
    +                b'\x80',  # subtype
    +                b'1234',  # data
                 ),
             ),
             """
    @@ -1172,15 +1172,15 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(12),
         ),
         TestCase(
    -        "binary/old-invalid/1",
    +        'binary/old-invalid/1',
             doc(
                 elem(
    -                "binary",
    +                'binary',
                     Tag.Binary,
                     binary(
                         2,
                         i32le(5),  # Bad length prefix: Should be 4
    -                    b"abcd",
    +                    b'abcd',
                     ),
                 ),
             ),
    @@ -1191,14 +1191,14 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(17),
         ),
         TestCase(
    -        "binary/old-invalid/2",
    +        'binary/old-invalid/2',
             doc(
                 elem(
    -                "bin",
    +                'bin',
                     Tag.Binary,
                     binary(
                         2,
    -                    b"abc",  # Bad: Subtype 2 requires at least four bytes
    +                    b'abc',  # Bad: Subtype 2 requires at least four bytes
                     ),
                 )
             ),
    @@ -1209,95 +1209,95 @@ def disallowed_key(char: str, k: str) -> str:
             error=corruption_at(9),
         ),
         TestCase(
    -        "minkey/simple",
    -        doc(elem("min", Tag.MinKey)),
    -        "A simple document with a MinKey element",
    +        'minkey/simple',
    +        doc(elem('min', Tag.MinKey)),
    +        'A simple document with a MinKey element',
         ),
         TestCase(
    -        "maxkey/simple",
    -        doc(elem("max", Tag.MaxKey)),
    -        "A simple document with a MaxKey element",
    +        'maxkey/simple',
    +        doc(elem('max', Tag.MaxKey)),
    +        'A simple document with a MaxKey element',
         ),
         TestCase(
    -        "int32/simple",
    -        doc(elem("int32", Tag.Int32, i32le(42))),
    -        "A simple document with a valid single int32 element",
    +        'int32/simple',
    +        doc(elem('int32', Tag.Int32, i32le(42))),
    +        'A simple document with a valid single int32 element',
         ),
         TestCase(
    -        "int32/truncated",
    -        doc(elem("int32-truncated", Tag.Int32, i32le(42)[:-1])),
    -        "Truncated 32-bit integer",
    +        'int32/truncated',
    +        doc(elem('int32-truncated', Tag.Int32, i32le(42)[:-1])),
    +        'Truncated 32-bit integer',
             error=corruption_at(21),
         ),
    -    TestCase("timestamp/simple", doc(elem("timestamp", Tag.Timestamp, i64le(1729))), """A simple timestamp element"""),
    +    TestCase('timestamp/simple', doc(elem('timestamp', Tag.Timestamp, i64le(1729))), """A simple timestamp element"""),
         TestCase(
    -        "timestamp/truncated",
    -        doc(elem("timestamp", Tag.Timestamp, i64le(1729)[:-1])),
    +        'timestamp/truncated',
    +        doc(elem('timestamp', Tag.Timestamp, i64le(1729)[:-1])),
             """A truncated timestamp element""",
             error=corruption_at(15),
         ),
         TestCase(
    -        "int64/simple",
    -        doc(elem("int64", Tag.Int64, i64le(1729))),
    -        "A simple document with a valid single int64 element",
    +        'int64/simple',
    +        doc(elem('int64', Tag.Int64, i64le(1729))),
    +        'A simple document with a valid single int64 element',
         ),
         TestCase(
    -        "int64/truncated",
    -        doc(elem("int64-truncated", Tag.Int64, i64le(1729)[:-1])),
    -        "Truncated 64-bit integer",
    +        'int64/truncated',
    +        doc(elem('int64-truncated', Tag.Int64, i64le(1729)[:-1])),
    +        'Truncated 64-bit integer',
             error=corruption_at(21),
         ),
         TestCase(
    -        "double/simple",
    -        doc(elem("double", Tag.Double, f64le(3.14))),
    -        "Simple float64 element",
    +        'double/simple',
    +        doc(elem('double', Tag.Double, f64le(3.14))),
    +        'Simple float64 element',
         ),
         TestCase(
    -        "double/truncated",
    -        doc(elem("double-truncated", Tag.Double, f64le(3.13)[:-1])),
    -        "Truncated 64-bit float",
    +        'double/truncated',
    +        doc(elem('double-truncated', Tag.Double, f64le(3.13)[:-1])),
    +        'Truncated 64-bit float',
             error=corruption_at(22),
         ),
         TestCase(
    -        "boolean/simple-false",
    -        doc(elem("bool", Tag.Boolean, b"\x00")),
    +        'boolean/simple-false',
    +        doc(elem('bool', Tag.Boolean, b'\x00')),
             """A simple boolean 'false'""",
         ),
         TestCase(
    -        "boolean/simple-true",
    -        doc(elem("bool", Tag.Boolean, b"\x01")),
    +        'boolean/simple-true',
    +        doc(elem('bool', Tag.Boolean, b'\x01')),
             """A simple boolean 'true'""",
         ),
         TestCase(
    -        "boolean/invalid",
    -        doc(elem("bool", Tag.Boolean, b"\xc3")),
    +        'boolean/invalid',
    +        doc(elem('bool', Tag.Boolean, b'\xc3')),
             """
             An invalid boolean octet. Must be '0' or '1', but is 0xc3.
             """,
             error=corruption_at(10),
         ),
         TestCase(
    -        "datetime/simple",
    -        doc(elem("utc", Tag.Datetime, b"\x0b\x98\x8c\x2b\x33\x01\x00\x00")),
    -        "Simple datetime element",
    +        'datetime/simple',
    +        doc(elem('utc', Tag.Datetime, b'\x0b\x98\x8c\x2b\x33\x01\x00\x00')),
    +        'Simple datetime element',
         ),
         TestCase(
    -        "datetime/truncated",
    -        doc(elem("utc", Tag.Datetime, b"\x0b\x98\x8c\x2b\x33\x01\x00")),
    -        "Truncated datetime element",
    +        'datetime/truncated',
    +        doc(elem('utc', Tag.Datetime, b'\x0b\x98\x8c\x2b\x33\x01\x00')),
    +        'Truncated datetime element',
             error=corruption_at(9),
         ),
         # DBRef
         TestCase(
    -        "dbref/missing-id",
    -        doc(utf8elem("$ref", "foo")),
    +        'dbref/missing-id',
    +        doc(utf8elem('$ref', 'foo')),
             """This dbref document is missing an $id element""",
             flags=BSON_VALIDATE_DOLLAR_KEYS,
             error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, MSG_EXPECTED_ID_FOLLOWING_REF, 18),
         ),
         TestCase(
    -        "dbref/non-id",
    -        doc(utf8elem("$ref", "foo"), utf8elem("bar", "baz")),
    +        'dbref/non-id',
    +        doc(utf8elem('$ref', 'foo'), utf8elem('bar', 'baz')),
             """
             The 'bar' element should be an '$id' element.
             """,
    @@ -1305,17 +1305,17 @@ def disallowed_key(char: str, k: str) -> str:
             error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, MSG_EXPECTED_ID_FOLLOWING_REF, 18),
         ),
         TestCase(
    -        "dbref/not-first-elements",
    -        doc(utf8elem("foo", "bar"), utf8elem("$ref", "a"), utf8elem("$id", "b")),
    +        'dbref/not-first-elements',
    +        doc(utf8elem('foo', 'bar'), utf8elem('$ref', 'a'), utf8elem('$id', 'b')),
             """
             This would be a valid DBRef, but the "$ref" key must come first.
             """,
             flags=BSON_VALIDATE_DOLLAR_KEYS,
    -        error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key("$", "$ref"), 17),
    +        error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key('$', '$ref'), 17),
         ),
         TestCase(
    -        "dbref/ref-without-id-with-db",
    -        doc(utf8elem("$ref", "foo"), utf8elem("$db", "bar")),
    +        'dbref/ref-without-id-with-db',
    +        doc(utf8elem('$ref', 'foo'), utf8elem('$db', 'bar')),
             """
             There should be an $id element, but we skip straight to $db
             """,
    @@ -1323,48 +1323,48 @@ def disallowed_key(char: str, k: str) -> str:
             error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, MSG_EXPECTED_ID_FOLLOWING_REF, 18),
         ),
         TestCase(
    -        "dbref/non-string-ref",
    -        doc(elem("$ref", Tag.Int32, i32le(42))),
    +        'dbref/non-string-ref',
    +        doc(elem('$ref', Tag.Int32, i32le(42))),
             """
             The $ref element must be a string, but is an integer.
             """,
             flags=BSON_VALIDATE_DOLLAR_KEYS,
    -        error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, "$ref element must be a UTF-8 element", 4),
    +        error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, '$ref element must be a UTF-8 element', 4),
         ),
         TestCase(
    -        "dbref/non-string-db",
    +        'dbref/non-string-db',
             doc(
    -            utf8elem("$ref", "foo"),
    -            utf8elem("$id", "bar"),
    -            elem("$db", Tag.Int32, i32le(42)),
    +            utf8elem('$ref', 'foo'),
    +            utf8elem('$id', 'bar'),
    +            elem('$db', Tag.Int32, i32le(42)),
             ),
             """
             The $db element should be a string, but is an integer.
             """,
             flags=BSON_VALIDATE_DOLLAR_KEYS,
    -        error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, "$db element in DBRef must be a UTF-8 element", 31),
    +        error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, '$db element in DBRef must be a UTF-8 element', 31),
         ),
         TestCase(
    -        "dbref/invalid-extras-between",
    +        'dbref/invalid-extras-between',
             doc(
    -            utf8elem("$ref", "foo"),
    -            utf8elem("$id", "bar"),
    -            utf8elem("extra", "field"),
    -            utf8elem("$db", "baz"),
    +            utf8elem('$ref', 'foo'),
    +            utf8elem('$id', 'bar'),
    +            utf8elem('extra', 'field'),
    +            utf8elem('$db', 'baz'),
             ),
             """
             Almost a valid DBRef, but there is an extra field before $db. We reject $db
             as an invalid key.
             """,
             flags=BSON_VALIDATE_DOLLAR_KEYS,
    -        error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key("$", "$db"), 48),
    +        error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key('$', '$db'), 48),
         ),
         TestCase(
    -        "dbref/invalid-double-ref",
    +        'dbref/invalid-double-ref',
             doc(
    -            utf8elem("$ref", "foo"),
    -            utf8elem("$ref", "bar"),
    -            utf8elem("$id", "baz"),
    +            utf8elem('$ref', 'foo'),
    +            utf8elem('$ref', 'bar'),
    +            utf8elem('$id', 'baz'),
             ),
             """
             Invalid DBRef contains a second $ref element.
    @@ -1373,25 +1373,25 @@ def disallowed_key(char: str, k: str) -> str:
             error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, MSG_EXPECTED_ID_FOLLOWING_REF, 18),
         ),
         TestCase(
    -        "dbref/invalid-missing-ref",
    -        doc(utf8elem("$id", "foo")),
    +        'dbref/invalid-missing-ref',
    +        doc(utf8elem('$id', 'foo')),
             """
             DBRef document requires a $ref key to be first.
             """,
             flags=BSON_VALIDATE_DOLLAR_KEYS,
    -        error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key("$", "$id"), 4),
    +        error=ErrorInfo(BSON_VALIDATE_DOLLAR_KEYS, disallowed_key('$', '$id'), 4),
         ),
         TestCase(
    -        "dbref/valid/simple",
    -        doc(utf8elem("$ref", "foo"), utf8elem("$id", "bar")),
    +        'dbref/valid/simple',
    +        doc(utf8elem('$ref', 'foo'), utf8elem('$id', 'bar')),
             """
             This is a simple valid DBRef element.
             """,
             flags=BSON_VALIDATE_DOLLAR_KEYS,
         ),
         TestCase(
    -        "dbref/valid/simple-with-db",
    -        doc(utf8elem("$ref", "foo"), utf8elem("$id", "bar"), utf8elem("$db", "baz")),
    +        'dbref/valid/simple-with-db',
    +        doc(utf8elem('$ref', 'foo'), utf8elem('$id', 'bar'), utf8elem('$db', 'baz')),
             """
             A simple DBRef of the form:
     
    @@ -1400,19 +1400,19 @@ def disallowed_key(char: str, k: str) -> str:
             flags=BSON_VALIDATE_DOLLAR_KEYS,
         ),
         TestCase(
    -        "dbref/valid/nested-id-doc",
    +        'dbref/valid/nested-id-doc',
             doc(
    -            utf8elem("$ref", "foo"),
    +            utf8elem('$ref', 'foo'),
                 elem(
    -                "$id",
    +                '$id',
                     Tag.Document,
                     doc(
    -                    utf8elem("$ref", "foo2"),
    -                    utf8elem("$id", "bar2"),
    -                    utf8elem("$db", "baz2"),
    +                    utf8elem('$ref', 'foo2'),
    +                    utf8elem('$id', 'bar2'),
    +                    utf8elem('$db', 'baz2'),
                     ),
                 ),
    -            utf8elem("$db", "baz"),
    +            utf8elem('$db', 'baz'),
             ),
             """
             This is a valid DBRef of the form:
    @@ -1422,12 +1422,12 @@ def disallowed_key(char: str, k: str) -> str:
             flags=BSON_VALIDATE_DOLLAR_KEYS,
         ),
         TestCase(
    -        "dbref/valid/trailing-content",
    +        'dbref/valid/trailing-content',
             doc(
    -            utf8elem("$ref", "foo"),
    -            utf8elem("$id", "bar"),
    -            utf8elem("$db", "baz"),
    -            utf8elem("extra", "field"),
    +            utf8elem('$ref', 'foo'),
    +            utf8elem('$id', 'bar'),
    +            utf8elem('$db', 'baz'),
    +            utf8elem('extra', 'field'),
             ),
             """
             A valid DBRef of the form:
    @@ -1442,11 +1442,11 @@ def disallowed_key(char: str, k: str) -> str:
             flags=BSON_VALIDATE_DOLLAR_KEYS,
         ),
         TestCase(
    -        "dbref/valid/trailing-content-no-db",
    +        'dbref/valid/trailing-content-no-db',
             doc(
    -            utf8elem("$ref", "foo"),
    -            utf8elem("$id", "bar"),
    -            utf8elem("extra", "field"),
    +            utf8elem('$ref', 'foo'),
    +            utf8elem('$id', 'bar'),
    +            utf8elem('extra', 'field'),
             ),
             """
             A valid DBRef of the form:
    @@ -1461,7 +1461,7 @@ def disallowed_key(char: str, k: str) -> str:
         ),
     ]
     
    -if __name__ == "__main__":
    +if __name__ == '__main__':
         # We don't take an arguments, but error if any are given
         parser = argparse.ArgumentParser(description=__doc__)
         parser.parse_args()
    @@ -1471,11 +1471,11 @@ def disallowed_key(char: str, k: str) -> str:
         for c in CASES:
             print()
             for part in generate(c):
    -            print(part, end="")
    +            print(part, end='')
     
         # Print the registration function
    -    print(f"\n{GENERATED_NOTE}")
    -    print("void test_install_generated_bson_validation(TestSuite* suite) {")
    +    print(f'\n{GENERATED_NOTE}')
    +    print('void test_install_generated_bson_validation(TestSuite* suite) {')
         for c in CASES:
             print(f'  TestSuite_Add(suite, "/bson/validate/" {json.dumps(c.name)}, {c.fn_name});')
    -    print("}")
    +    print('}')
    diff --git a/src/libmongoc/doc/cmakerefdomain.py b/src/libmongoc/doc/cmakerefdomain.py
    index ef9bf2ba4eb..151b503bc1a 100644
    --- a/src/libmongoc/doc/cmakerefdomain.py
    +++ b/src/libmongoc/doc/cmakerefdomain.py
    @@ -8,38 +8,40 @@
     replaced by a more full-featured extension.
     
     """
    +
     from typing import Any, List
    +
     from sphinx.application import Sphinx
    -from sphinx.roles import XRefRole
     from sphinx.domains import Domain, ObjType
    +from sphinx.roles import XRefRole
     
     kinds = [
    -    "command",
    -    "cpack_gen",
    -    "envvar",
    -    "generator",
    -    "genex",
    -    "guide",
    -    "variable",
    -    "module",
    -    "policy",
    -    "prop_cache",
    -    "prop_dir",
    -    "prop_gbl",
    -    "prop_inst",
    -    "prop_sf",
    -    "prop_test",
    -    "prop_tgt",
    -    "manual",
    +    'command',
    +    'cpack_gen',
    +    'envvar',
    +    'generator',
    +    'genex',
    +    'guide',
    +    'variable',
    +    'module',
    +    'policy',
    +    'prop_cache',
    +    'prop_dir',
    +    'prop_gbl',
    +    'prop_inst',
    +    'prop_sf',
    +    'prop_test',
    +    'prop_tgt',
    +    'manual',
     ]
     
     
     class CMakeRefDomain(Domain):
    -    name = "cmake"
    -    label = "CMake (Minimal)"
    +    name = 'cmake'
    +    label = 'CMake (Minimal)'
         object_types = {k: ObjType(k, k) for k in kinds}
         roles = {k: XRefRole() for k in kinds}
    -    roles["command"] = XRefRole(fix_parens=True)
    +    roles['command'] = XRefRole(fix_parens=True)
         directives = {}
         initial_data: Any = {}
     
    @@ -51,6 +53,6 @@ def merge_domaindata(self, docnames: List[str], otherdata: Any) -> None:
     def setup(app: Sphinx):
         app.add_domain(CMakeRefDomain)
         return {
    -        "parallel_read_safe": True,
    -        "parallel_write_safe": True,
    +        'parallel_read_safe': True,
    +        'parallel_write_safe': True,
         }
    diff --git a/src/libmongoc/doc/conf.py b/src/libmongoc/doc/conf.py
    index 072fa2d7308..009c33670fe 100644
    --- a/src/libmongoc/doc/conf.py
    +++ b/src/libmongoc/doc/conf.py
    @@ -12,10 +12,9 @@
         # Try importing from older Sphinx version path.
         from sphinx.builders.html import DirectoryHTMLBuilder
     
    -from docutils.parsers.rst import directives, Directive
    +from docutils.parsers.rst import Directive, directives
     from sphinx.application import Sphinx
     from sphinx.application import logger as sphinx_log
    -from sphinx.config import Config
     
     has_sphinx_design = False
     try:
    @@ -23,6 +22,7 @@
         # sphinx-design is not required for building man pages.
         # python-sphinx-design is not currently available on EPEL. The package for EPEL includes man pages.
         from sphinx_design.dropdown import DropdownDirective
    +
         has_sphinx_design = True
     except ImportError:
         pass
    @@ -30,40 +30,40 @@
     # Ensure we can import "mongoc" extension module.
     this_path = os.path.dirname(__file__)
     sys.path.append(this_path)
    -sys.path.append(os.path.normpath(os.path.join(this_path, "../../../build/sphinx")))
    +sys.path.append(os.path.normpath(os.path.join(this_path, '../../../build/sphinx')))
     
    -from mongoc_common import *
    +from mongoc_common import *  # noqa: E402, F403
     
     extensions = [
    -    "mongoc",
    -    "sphinx.ext.intersphinx",
    +    'mongoc',
    +    'sphinx.ext.intersphinx',
         # NOTE: We use our own "minimal" CMake domain that lets us refer to external
         # objects from the CMake inventory, but provides no other features. The
         # build *could* otherwise use sphinxcontrib-moderncmakedomain, which is
         # more full-featured, but it is not (currently) available in repositories for
         # package building.
         # "sphinxcontrib.moderncmakedomain",
    -    "cmakerefdomain",
    -    "sphinx.ext.mathjax",
    +    'cmakerefdomain',
    +    'sphinx.ext.mathjax',
     ]
     
     if has_sphinx_design:
    -    extensions.append("sphinx_design")
    +    extensions.append('sphinx_design')
     
     # General information about the project.
    -project = "libmongoc"
    -copyright = "2009-present, MongoDB, Inc."
    -author = "MongoDB, Inc"
    +project = 'libmongoc'
    +copyright = '2009-present, MongoDB, Inc.'
    +author = 'MongoDB, Inc'
     
    -version_path = os.path.join(os.path.dirname(__file__), "../../..", "VERSION_CURRENT")
    +version_path = os.path.join(os.path.dirname(__file__), '../../..', 'VERSION_CURRENT')
     version = open(version_path).read().strip()
     
     # The extension requires the "base" to contain '%s' exactly once, but we never intend to use it though
     
    -language = "en"
    -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
    -master_doc = "index"
    -html_static_path = ["static"]
    +language = 'en'
    +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
    +master_doc = 'index'
    +html_static_path = ['static']
     
     # Set an empty list of disabled reftypes.
     # Sphinx 5.0 disables "std:doc" by default.
    @@ -73,13 +73,13 @@
     intersphinx_timeout = 30
     
     intersphinx_mapping = {
    -    "sphinx": ("https://www.sphinx-doc.org/en/master", "includes/sphinx.inv"),
    -    "python": ("https://docs.python.org/3", "includes/python.inv"),
    -    "bson": ("https://www.mongoc.org/libbson/current", "includes/libbson.inv"),
    -    "cmake": ("https://cmake.org/cmake/help/latest", "includes/cmake.inv"),
    +    'sphinx': ('https://www.sphinx-doc.org/en/master', 'includes/sphinx.inv'),
    +    'python': ('https://docs.python.org/3', 'includes/python.inv'),
    +    'bson': ('https://www.mongoc.org/libbson/current', 'includes/libbson.inv'),
    +    'cmake': ('https://cmake.org/cmake/help/latest', 'includes/cmake.inv'),
     }
     
    -_UPDATE_KEY = "update_external_inventories"
    +_UPDATE_KEY = 'update_external_inventories'
     
     
     def _maybe_update_inventories(app: Sphinx):
    @@ -91,30 +91,30 @@ def _maybe_update_inventories(app: Sphinx):
         This function will only have an effect if the appropriate command-line config
         value is defined.
         """
    -    prefix = "[libmongoc/doc/conf.py]"
    +    prefix = '[libmongoc/doc/conf.py]'
         config = app.config
         if not config[_UPDATE_KEY]:
             sphinx_log.info(
    -            "%s Using existing intersphinx inventories. Refresh by running with ‘-D %s=1’",
    +            '%s Using existing intersphinx inventories. Refresh by running with ‘-D %s=1’',
                 prefix,
                 _UPDATE_KEY,
             )
             return
         for name, tup in intersphinx_mapping.items():
             urlbase, filename = tup
    -        url = f"{urlbase}/objects.inv"
    -        sphinx_log.info("%s Downloading external inventory for %s from [%s]", prefix, name, url)
    +        url = f'{urlbase}/objects.inv'
    +        sphinx_log.info('%s Downloading external inventory for %s from [%s]', prefix, name, url)
             with urllib.request.urlopen(url) as req:
                 req: http.client.HTTPResponse = req
                 dest = Path(app.srcdir) / filename
    -            sphinx_log.info("%s Saving inventory [%s] to file [%s]", prefix, url, dest)
    -            with dest.open("wb") as out:
    +            sphinx_log.info('%s Saving inventory [%s] to file [%s]', prefix, url, dest)
    +            with dest.open('wb') as out:
                     buf = req.read(1024 * 4)
                     while buf:
                         out.write(buf)
                         buf = req.read(1024 * 4)
             sphinx_log.info(
    -            "%s Inventory file [%s] was updated. Commit the result to save it for subsequent builds.",
    +            '%s Inventory file [%s] was updated. Commit the result to save it for subsequent builds.',
                 prefix,
                 dest,
             )
    @@ -122,8 +122,8 @@ def _maybe_update_inventories(app: Sphinx):
     
     # -- Options for HTML output ----------------------------------------------
     
    -html_theme = "furo"
    -html_title = html_shorttitle = "libmongoc %s" % version
    +html_theme = 'furo'
    +html_title = html_shorttitle = 'libmongoc %s' % version
     # html_favicon = None
     html_use_index = True
     
    @@ -232,50 +232,60 @@ def _maybe_update_inventories(app: Sphinx):
     def add_canonical_link(app: Sphinx, pagename: str, templatename: str, context: Dict[str, Any], doctree: Any):
     link = f'<link rel="canonical" href="https://www.mongoc.org/libmongoc/current/{pagename}/"/>'
     
    -    context["metatags"] = context.get("metatags", "") + link
    +    context['metatags'] = context.get('metatags', '') + link
     
     
     if has_sphinx_design:
    +
         class AdDropdown(DropdownDirective):
             """A sphinx-design dropdown that can also be an admonition."""
     
    -        option_spec = DropdownDirective.option_spec | {"admonition": directives.unchanged_required}
    +        option_spec = DropdownDirective.option_spec | {'admonition': directives.unchanged_required}
     
             def run(self):
    -            adm = self.options.get("admonition")
    +            adm = self.options.get('admonition')
                 if adm is not None:
    -                self.options.setdefault("class-container", []).extend(("admonition", adm))
    -                self.options.setdefault("class-title", []).append(f"admonition-title")
    +                self.options.setdefault('class-container', []).extend(('admonition', adm))
    +                self.options.setdefault('class-title', []).append('admonition-title')
                 return super().run()
     else:
    +
         class EmptyDirective(Directive):
             has_content = True
    +
             def run(self):
                 return []
    -        
    +
    +
     has_add_css_file = True
    -        
    -def check_html_builder_requirements (app):
    +
    +
    +def check_html_builder_requirements(app):
         if isinstance(app.builder, DirectoryHTMLBuilder):
             if not has_sphinx_design:
    -            raise RuntimeError("The sphinx-design package is required to build HTML documentation but was not detected. Install sphinx-design.")
    +            raise RuntimeError(
    +                'The sphinx-design package is required to build HTML documentation but was not detected. Install sphinx-design.'
    +            )
             if not has_add_css_file:
    -            raise RuntimeError("A newer version of Sphinx is required to build HTML documentation with CSS files. Upgrade Sphinx to v3.5.0 or newer")
    +            raise RuntimeError(
    +                'A newer version of Sphinx is required to build HTML documentation with CSS files. Upgrade Sphinx to v3.5.0 or newer'
    +            )
    +
     
     def setup(app: Sphinx):
    -    mongoc_common_setup(app)
    -    app.connect("builder-inited", check_html_builder_requirements)
    +    mongoc_common_setup(app)  # noqa: F405
    +    app.connect('builder-inited', check_html_builder_requirements)
         if has_sphinx_design:
    -        app.add_directive("ad-dropdown", AdDropdown)
    +        app.add_directive('ad-dropdown', AdDropdown)
         else:
    -        app.add_directive("ad-dropdown", EmptyDirective)
    -        app.add_directive("tab-set", EmptyDirective)
    -    app.connect("html-page-context", add_canonical_link)
    -    if hasattr(app, "add_css_file"):
    -        app.add_css_file("styles.css")
    +        app.add_directive('ad-dropdown', EmptyDirective)
    +        app.add_directive('tab-set', EmptyDirective)
    +    app.connect('html-page-context', add_canonical_link)
    +    if hasattr(app, 'add_css_file'):
    +        app.add_css_file('styles.css')
         else:
             global has_add_css_file
             has_add_css_file = False
    -        
    -    app.connect("builder-inited", _maybe_update_inventories)
    +
    +    app.connect('builder-inited', _maybe_update_inventories)
         app.add_config_value(_UPDATE_KEY, default=False, rebuild=True, types=[bool])
    diff --git a/src/libmongoc/examples/parse_handshake_cfg.py b/src/libmongoc/examples/parse_handshake_cfg.py
    index cb55dd44e0c..77bf5123211 100644
    --- a/src/libmongoc/examples/parse_handshake_cfg.py
    +++ b/src/libmongoc/examples/parse_handshake_cfg.py
    @@ -3,61 +3,63 @@
     # `MD_FLAGS` maps the flag to its bit position.
     # The bit positions must match those defined in src/mongoc/mongoc-handshake-private.h
     MD_FLAGS = {
    -    "MONGOC_MD_FLAG_ENABLE_CRYPTO": 0,
    -    "MONGOC_MD_FLAG_ENABLE_CRYPTO_CNG": 1,
    -    "MONGOC_MD_FLAG_ENABLE_CRYPTO_COMMON_CRYPTO": 2,
    -    "MONGOC_MD_FLAG_ENABLE_CRYPTO_LIBCRYPTO": 3,
    -    "MONGOC_MD_FLAG_ENABLE_CRYPTO_SYSTEM_PROFILE": 4,
    -    "MONGOC_MD_FLAG_ENABLE_SASL": 5,
    -    "MONGOC_MD_FLAG_ENABLE_SSL": 6,
    -    "MONGOC_MD_FLAG_ENABLE_SSL_OPENSSL": 7,
    -    "MONGOC_MD_FLAG_ENABLE_SSL_SECURE_CHANNEL": 8,
    -    "MONGOC_MD_FLAG_ENABLE_SSL_SECURE_TRANSPORT": 9,
    -    "MONGOC_MD_FLAG_EXPERIMENTAL_FEATURES": 10,
    -    "MONGOC_MD_FLAG_HAVE_SASL_CLIENT_DONE": 11,
    -    "MONGOC_MD_FLAG_HAVE_WEAK_SYMBOLS": 12,
    -    "MONGOC_MD_FLAG_NO_AUTOMATIC_GLOBALS": 13, # Removed in CDRIVER-1330.
    -    "MONGOC_MD_FLAG_ENABLE_SSL_LIBRESSL": 14, # Removed in CDRIVER-5693.
    -    "MONGOC_MD_FLAG_ENABLE_SASL_CYRUS": 15,
    -    "MONGOC_MD_FLAG_ENABLE_SASL_SSPI": 16,
    -    "MONGOC_MD_FLAG_HAVE_SOCKLEN": 17,
    -    "MONGOC_MD_FLAG_ENABLE_COMPRESSION": 18,
    -    "MONGOC_MD_FLAG_ENABLE_COMPRESSION_SNAPPY": 19,
    -    "MONGOC_MD_FLAG_ENABLE_COMPRESSION_ZLIB": 20,
    -    "MONGOC_MD_FLAG_ENABLE_SASL_GSSAPI": 21,
    -    "MONGOC_MD_FLAG_ENABLE_RES_NSEARCH": 22,
    -    "MONGOC_MD_FLAG_ENABLE_RES_NDESTROY": 23,
    -    "MONGOC_MD_FLAG_ENABLE_RES_NCLOSE": 24,
    -    "MONGOC_MD_FLAG_ENABLE_RES_SEARCH": 25,
    -    "MONGOC_MD_FLAG_ENABLE_DNSAPI": 26,
    -    "MONGOC_MD_FLAG_ENABLE_RDTSCP": 27,
    -    "MONGOC_MD_FLAG_HAVE_SCHED_GETCPU": 28,
    -    "MONGOC_MD_FLAG_ENABLE_SHM_COUNTERS": 29,
    -    "MONGOC_MD_FLAG_TRACE": 30,
    +    'MONGOC_MD_FLAG_ENABLE_CRYPTO': 0,
    +    'MONGOC_MD_FLAG_ENABLE_CRYPTO_CNG': 1,
    +    'MONGOC_MD_FLAG_ENABLE_CRYPTO_COMMON_CRYPTO': 2,
    +    'MONGOC_MD_FLAG_ENABLE_CRYPTO_LIBCRYPTO': 3,
    +    'MONGOC_MD_FLAG_ENABLE_CRYPTO_SYSTEM_PROFILE': 4,
    +    'MONGOC_MD_FLAG_ENABLE_SASL': 5,
    +    'MONGOC_MD_FLAG_ENABLE_SSL': 6,
    +    'MONGOC_MD_FLAG_ENABLE_SSL_OPENSSL': 7,
    +    'MONGOC_MD_FLAG_ENABLE_SSL_SECURE_CHANNEL': 8,
    +    'MONGOC_MD_FLAG_ENABLE_SSL_SECURE_TRANSPORT': 9,
    +    'MONGOC_MD_FLAG_EXPERIMENTAL_FEATURES': 10,
    +    'MONGOC_MD_FLAG_HAVE_SASL_CLIENT_DONE': 11,
    +    'MONGOC_MD_FLAG_HAVE_WEAK_SYMBOLS': 12,
    +    'MONGOC_MD_FLAG_NO_AUTOMATIC_GLOBALS': 13,  # Removed in CDRIVER-1330.
    +    'MONGOC_MD_FLAG_ENABLE_SSL_LIBRESSL': 14,  # Removed in CDRIVER-5693.
    +    'MONGOC_MD_FLAG_ENABLE_SASL_CYRUS': 15,
    +    'MONGOC_MD_FLAG_ENABLE_SASL_SSPI': 16,
    +    'MONGOC_MD_FLAG_HAVE_SOCKLEN': 17,
    +    'MONGOC_MD_FLAG_ENABLE_COMPRESSION': 18,
    +    'MONGOC_MD_FLAG_ENABLE_COMPRESSION_SNAPPY': 19,
    +    'MONGOC_MD_FLAG_ENABLE_COMPRESSION_ZLIB': 20,
    +    'MONGOC_MD_FLAG_ENABLE_SASL_GSSAPI': 21,
    +    'MONGOC_MD_FLAG_ENABLE_RES_NSEARCH': 22,
    +    'MONGOC_MD_FLAG_ENABLE_RES_NDESTROY': 23,
    +    'MONGOC_MD_FLAG_ENABLE_RES_NCLOSE': 24,
    +    'MONGOC_MD_FLAG_ENABLE_RES_SEARCH': 25,
    +    'MONGOC_MD_FLAG_ENABLE_DNSAPI': 26,
    +    'MONGOC_MD_FLAG_ENABLE_RDTSCP': 27,
    +    'MONGOC_MD_FLAG_HAVE_SCHED_GETCPU': 28,
    +    'MONGOC_MD_FLAG_ENABLE_SHM_COUNTERS': 29,
    +    'MONGOC_MD_FLAG_TRACE': 30,
         # `MONGOC_MD_FLAG_ENABLE_ICU` was accidentally removed in libmongoc 1.25.0-1.25.3.
         # If parsing a config-bitfield produced by libmongoc 1.25.0-1.25.3, use the version of `parse_handshake_cfg.py` from the git tag 1.25.0.
    -    "MONGOC_MD_FLAG_ENABLE_ICU": 31,
    -    "MONGOC_MD_FLAG_ENABLE_CLIENT_SIDE_ENCRYPTION": 32,
    -    "MONGOC_MD_FLAG_ENABLE_MONGODB_AWS_AUTH": 33,
    -    "MONGOC_MD_FLAG_ENABLE_SRV": 34,
    -    "MONGOC_MD_FLAG_HAVE_BCRYPT_PBKDF2": 35,
    +    'MONGOC_MD_FLAG_ENABLE_ICU': 31,
    +    'MONGOC_MD_FLAG_ENABLE_CLIENT_SIDE_ENCRYPTION': 32,
    +    'MONGOC_MD_FLAG_ENABLE_MONGODB_AWS_AUTH': 33,
    +    'MONGOC_MD_FLAG_ENABLE_SRV': 34,
    +    'MONGOC_MD_FLAG_HAVE_BCRYPT_PBKDF2': 35,
     }
     
    +
     def main():
    -    flag_to_number = {s: 2 ** i for s,i in MD_FLAGS.items()}
    +    flag_to_number = {s: 2**i for s, i in MD_FLAGS.items()}
     
         if len(sys.argv) < 2:
    -        print ("Usage: python {0} config-bitfield".format(sys.argv[0]))
    -        print ("Example: python parse_handshake_cfg.py 0x3e65")
    +        print('Usage: python {0} config-bitfield'.format(sys.argv[0]))
    +        print('Example: python parse_handshake_cfg.py 0x3e65')
             return
     
         config_bitfield_string = sys.argv[1]
         config_bitfield_num = int(config_bitfield_string, 0)
    -    print ("Decimal value: {}".format(config_bitfield_num))
    +    print('Decimal value: {}'.format(config_bitfield_num))
     
         for flag, num in flag_to_number.items():
    -        v = "true" if config_bitfield_num & num else "false"
    -        print ("{:<50}: {}".format(flag, v))
    +        v = 'true' if config_bitfield_num & num else 'false'
    +        print('{:<50}: {}'.format(flag, v))
    +
     
    -if __name__ == "__main__":
    +if __name__ == '__main__':
         main()
    diff --git a/tools/base.sh b/tools/base.sh
    index d3fd8a18a44..5b0625b4d73 100755
    --- a/tools/base.sh
    +++ b/tools/base.sh
    @@ -21,50 +21,49 @@
     # * exists <path>
     #     • Return zero if <path> names a file, directory, or either, respectively.
     
    -
     set -o errexit
     set -o pipefail
     set -o nounset
     
     is-set() {
    -    [[ -n ${!1+x} ]]
    +  [[ -n ${!1+x} ]]
     }
     
     log() {
    -    echo "${@}" 1>&2
    -    return 0
    +  echo "${@}" 1>&2
    +  return 0
     }
     
     debug() {
    -    if [[ "${PRINT_DEBUG_LOGS:-0}" != "0" ]]; then
    -        log "${@}"
    -    fi
    +  if [[ "${PRINT_DEBUG_LOGS:-0}" != "0" ]]; then
    +    log "${@}"
    +  fi
     }
     
     fail() {
    -    log "${@}"
    -    return 1
    +  log "${@}"
    +  return 1
     }
     
     run-chdir() {
    -    [[ "$#" -gt 1 ]] || fail "run-chdir expects at least two arguments"
    -    local _dir="$1"
    -    shift
    -    pushd "$_dir" > /dev/null
    -    debug "Run in directory [$_dir]:" "$@"
    -    "$@"
    -    local _rc=$?
    -    popd > /dev/null
    -    return $_rc
    +  [[ "$#" -gt 1 ]] || fail "run-chdir expects at least two arguments"
    +  local _dir="$1"
    +  shift
    +  pushd "$_dir" >/dev/null
    +  debug "Run in directory [$_dir]:" "$@"
    +  "$@"
    +  local _rc=$?
    +  popd >/dev/null
    +  return $_rc
     }
     
    -is-file() { [[ -f "$1" ]];}
    -is-dir() { [[ -d "$1" ]];}
    -exists() { [[ -e "$1" ]];}
    +is-file() { [[ -f "$1" ]]; }
    +is-dir() { [[ -d "$1" ]]; }
    +exists() { [[ -e "$1" ]]; }
     
     have-command() {
    -    [[ "$#" -eq 1 ]] || fail "have-command expects a single argument"
    -    type "$1" > /dev/null 2>&1
    +  [[ "$#" -eq 1 ]] || fail "have-command expects a single argument"
    +  type "$1" >/dev/null 2>&1
     }
     
     # Inhibit msys path conversion
    diff --git a/tools/download.sh b/tools/download.sh
    index 64d5e13cdd8..9467bf9789f 100755
    --- a/tools/download.sh
    +++ b/tools/download.sh
    @@ -12,77 +12,77 @@
     . "$(dirname "${BASH_SOURCE[0]}")/use.sh" base
     
     download-file() {
    -    declare uri
    -    declare out
    -    unset no_tls_verify
    -    while [[ "$#" != 0 ]]; do
    -        case "$1" in
    -            --uri)
    -                shift
    -                uri=$1
    -                ;;
    -            --uri=*)
    -                uri="${1#--uri=*}"
    -                ;;
    -            --out)
    -                shift
    -                out=$1
    -                ;;
    -            --out=*)
    -                out=${1#--out=*}
    -                ;;
    -            --no-tls-verify)
    -                # shellcheck disable=2034
    -                no_tls_verify=1
    -                ;;
    -            *)
    -                fail "Unknown argument: $1"
    -        esac
    -        shift
    -    done
    -    if ! is-set uri || ! is-set out; then
    -        fail "download-file requires --uri= and --out= arguments"
    -        return
    -    fi
    -    debug "Download [$uri] to [$out]"
    +  declare uri
    +  declare out
    +  unset no_tls_verify
    +  while [[ "$#" != 0 ]]; do
    +    case "$1" in
    +    --uri)
    +      shift
    +      uri=$1
    +      ;;
    +    --uri=*)
    +      uri="${1#--uri=*}"
    +      ;;
    +    --out)
    +      shift
    +      out=$1
    +      ;;
    +    --out=*)
    +      out=${1#--out=*}
    +      ;;
    +    --no-tls-verify)
    +      # shellcheck disable=2034
    +      no_tls_verify=1
    +      ;;
    +    *)
    +      fail "Unknown argument: $1"
    +      ;;
    +    esac
    +    shift
    +  done
    +  if ! is-set uri || ! is-set out; then
    +    fail "download-file requires --uri= and --out= arguments"
    +    return
    +  fi
    +  debug "Download [$uri] to [$out]"
     
    -    if have-command curl; then
    -        curl_argv=(
    -            --silent
    -            --show-error
    -            --fail
    -            --retry 5
    -            --max-time 120
    -            --location  # (Follow redirects)
    -            --output "$out"
    -        )
    -        if is-set no_tls_verify; then
    -            curl_argv+=(--insecure)
    -        fi
    -        curl_argv+=(-- "$uri")
    -        debug "Execute curl command: [curl ${curl_argv[*]}]"
    -        output=$(curl "${curl_argv[@]}") || fail "$output" || return
    -        debug "$output"
    -    elif have-command wget; then
    -        wget_argv=(
    -            --output-document="$out"
    -            --tries=5
    -            --timeout=120
    -        )
    -        if is-set no_tls_verify; then
    -            wget_argv+=(--no-check-certificate)
    -        fi
    -        wget_argv+=(-- "$uri")
    -        debug "Execute wget command: [wget ${wget_argv[*]}]"
    -        output=$(wget "${wget_argv[@]}" 2>&1) || fail "wget failed: $output" || return
    -        debug "$output"
    -    else
    -        fail "This script requires either curl or wget to be available" || return
    +  if have-command curl; then
    +    curl_argv=(
    +      --silent
    +      --show-error
    +      --fail
    +      --retry 5
    +      --max-time 120
    +      --location # (Follow redirects)
    +      --output "$out"
    +    )
    +    if is-set no_tls_verify; then
    +      curl_argv+=(--insecure)
    +    fi
    +    curl_argv+=(-- "$uri")
    +    debug "Execute curl command: [curl ${curl_argv[*]}]"
    +    output=$(curl "${curl_argv[@]}") || fail "$output" || return
    +    debug "$output"
    +  elif have-command wget; then
    +    wget_argv=(
    +      --output-document="$out"
    +      --tries=5
    +      --timeout=120
    +    )
    +    if is-set no_tls_verify; then
    +      wget_argv+=(--no-check-certificate)
         fi
    -    debug "Download [$uri] to [$out] - Done"
    +    wget_argv+=(-- "$uri")
    +    debug "Execute wget command: [wget ${wget_argv[*]}]"
    +    output=$(wget "${wget_argv[@]}" 2>&1) || fail "wget failed: $output" || return
    +    debug "$output"
    +  else
    +    fail "This script requires either curl or wget to be available" || return
    +  fi
    +  debug "Download [$uri] to [$out] - Done"
     }
     
    -
     if is-main; then
    -    download-file "$@"
    +  download-file "$@"
     fi
    diff --git a/tools/earthly.sh b/tools/earthly.sh
    index 14389c40e1b..34bf8eed373 100755
    --- a/tools/earthly.sh
    +++ b/tools/earthly.sh
    @@ -8,16 +8,16 @@ set -euo pipefail
     
     # Calc the arch of the executable we want
     case "$ARCHNAME" in
    -    x64)
    -        arch=amd64
    -        ;;
    -    arm64)
    -        arch=arm64
    -        ;;
    -    *)
    -        echo "Unsupported architecture for automatic Earthly download: $HOSTTYPE" 1>&1
    -        exit 99
    -        ;;
    +x64)
    +  arch=amd64
    +  ;;
    +arm64)
    +  arch=arm64
    +  ;;
    +*)
    +  echo "Unsupported architecture for automatic Earthly download: $HOSTTYPE" 1>&2
    +  exit 99
    +  ;;
     esac
     
     # The location where the Earthly executable will live
    @@ -29,16 +29,16 @@ EARTHLY_EXE="$cache_dir/$exe_filename"
     
     # Download if it isn't already present
     if ! is-file "$EARTHLY_EXE"; then
    -    echo "Downloading $exe_filename $EARTHLY_VERSION"
    -    url="https://github.com/earthly/earthly/releases/download/v$EARTHLY_VERSION/$exe_filename"
    -    curl --retry 5 -LsS --max-time 120 --fail "$url" --output "$EARTHLY_EXE"
    -    chmod a+x "$EARTHLY_EXE"
    +  echo "Downloading $exe_filename $EARTHLY_VERSION"
    +  url="https://github.com/earthly/earthly/releases/download/v$EARTHLY_VERSION/$exe_filename"
    +  curl --retry 5 -LsS --max-time 120 --fail "$url" --output "$EARTHLY_EXE"
    +  chmod a+x "$EARTHLY_EXE"
     fi
     
     run-earthly() {
    -    "$EARTHLY_EXE" "$@"
    +  "$EARTHLY_EXE" "$@"
     }
     
     if is-main; then
    -    run-earthly "$@"
    +  run-earthly "$@"
     fi
    diff --git a/tools/format.py b/tools/format.py
    index 767270f34ca..4b2f75fff29 100644
    --- a/tools/format.py
    +++ b/tools/format.py
    @@ -14,7 +14,6 @@
     import functools
     import itertools
     import multiprocessing
    -import os
     import re
     import subprocess
     import sys
    @@ -32,30 +31,30 @@ def main(argv: Sequence[str]) -> int:
         # By default, add two jobs to the CPU count since some work is waiting on disk
         dflt_jobs = multiprocessing.cpu_count() + 2
         parser.add_argument(
    -        "--jobs",
    -        "-j",
    +        '--jobs',
    +        '-j',
             type=int,
    -        help=f"Number of parallel jobs to run (default: {dflt_jobs})",
    -        metavar="",
    +        help=f'Number of parallel jobs to run (default: {dflt_jobs})',
    +        metavar='',
             default=dflt_jobs,
         )
         parser.add_argument(
    -        "--mode",
    +        '--mode',
             choices=RunMode.__args__,
    -        help="Whether to apply changes, or simply check for formatting violations (default: apply)",
    -        default="apply",
    +        help='Whether to apply changes, or simply check for formatting violations (default: apply)',
    +        default='apply',
         )
         parser.add_argument(
    -        "--clang-format-bin",
    -        help="The clang-format executable to be used (default: “clang-format”)",
    -        default="clang-format",
    -        metavar="",
    +        '--clang-format-bin',
    +        help='The clang-format executable to be used (default: “clang-format”)',
    +        default='clang-format',
    +        metavar='',
         )
         parser.add_argument(
    -        "files",
    -        metavar="",
    -        nargs="*",
    -        help="List of files to be selected for formatting. If omitted, the default set of files are selected",
    +        'files',
    +        metavar='',
    +        nargs='*',
    +        help='List of files to be selected for formatting. If omitted, the default set of files are selected',
         )
         args = parser.parse_args(argv)
         mode: RunMode = args.mode
    @@ -70,13 +69,13 @@ def main(argv: Sequence[str]) -> int:
                 case patterns:
                     files = [Path(p).resolve() for p in patterns]
         except Exception as e:
    -        raise RuntimeError("Failed to collect files for formatting (See above)") from e
    +        raise RuntimeError('Failed to collect files for formatting (See above)') from e
         # Fail if no files matched
         assert files
         # Split the file list into groups to be dispatched
         num_jobs: int = min(args.jobs, len(files))
         groups = [files[n::num_jobs] for n in range(num_jobs)]
    -    print(f"Formatting {len(files)} files with {num_jobs} workers...", file=sys.stderr)
    +    print(f'Formatting {len(files)} files with {num_jobs} workers...', file=sys.stderr)
     
         # Bind the formatting arguments to the formatter function
         format_group = functools.partial(_format_files, mode=mode, clang_format=cf)
    @@ -86,14 +85,14 @@ def main(argv: Sequence[str]) -> int:
         try:
             okay = all(pool.map(format_group, groups))
         except Exception as e:
    -        raise RuntimeError("Unexpected error while formatting files (See above)") from e
    +        raise RuntimeError('Unexpected error while formatting files (See above)') from e
         if not okay:
             return 1
         return 0
     
     
    -RunMode = Literal["apply", "check"]
    -"Whether we should apply changes, or just check for violations"
    +RunMode = Literal['apply', 'check']
    +'Whether we should apply changes, or just check for violations'
     
     #: This regex tells us which #include directives should be modified to use angle brackets
     #: The regex is written to preserve whitespace and surrounding context. re.VERBOSE
    @@ -130,27 +129,27 @@ def main(argv: Sequence[str]) -> int:
     """
     
     SOURCE_PATTERNS = [
    -    "**/*.h",
    -    "**/*.hpp",
    -    "**/*.c",
    -    "**/*.cpp",
    +    '**/*.h',
    +    '**/*.hpp',
    +    '**/*.c',
    +    '**/*.cpp',
     ]
     """
     Recursive source file patterns, based on file extensions.
     """
     
     SOURCE_DIRS = [
    -    "src/common",
    -    "src/libbson",
    -    "src/libmongoc",
    -    "tests",
    +    'src/common',
    +    'src/libbson',
    +    'src/libmongoc',
    +    'tests',
     ]
     """
     Directories that contain our own source files (not vendored code)
     """
     
     EXCLUDE_SOURCES = [
    -    "src/libbson/src/jsonsl/**/*",
    +    'src/libbson/src/jsonsl/**/*',
     ]
     """
     Globbing patterns that select files that are contained in our source directories,
    @@ -181,14 +180,14 @@ def _include_subst_fn(fpath: Path):
     
         def f(mat: re.Match[str]) -> str:
             # See groups in INCLUDE_RE
    -        target = mat["path"]
    +        target = mat['path']
             abs_target = parent_dir / target
             if abs_target.is_file():
                 # This should be a relative include:
                 newl = f'{mat["directive"]}"./{target}"{mat["tail"]}'
             else:
    -            newl = f"{mat['directive']}<{target}>{mat['tail']}"
    -        print(f" - {fpath}: update #include directive: {mat[0]!r} → {newl!r}", file=sys.stderr)
    +            newl = f'{mat["directive"]}<{target}>{mat["tail"]}'
    +        print(f' - {fpath}: update #include directive: {mat[0]!r} → {newl!r}', file=sys.stderr)
             return newl
     
         return f
    @@ -199,7 +198,7 @@ def _fixup_includes(fpath: Path, *, mode: RunMode) -> bool:
         Apply #include-fixup to the content of the given source file.
         """
         # Split into lines
    -    old_lines = fpath.read_text().split("\n")
    +    old_lines = fpath.read_text().split('\n')
         # Do a regex substitution on ever line:
         rx = re.compile(INCLUDE_RE, re.VERBOSE)
         new_lines = [rx.sub(_include_subst_fn(fpath), ln) for ln in old_lines]
    @@ -210,14 +209,14 @@ def _fixup_includes(fpath: Path, *, mode: RunMode) -> bool:
             case False, _:
                 # No file changes. Nothing to do
                 return True
    -        case _, "apply":
    +        case _, 'apply':
                 # We are applying changes. Write the lines back into the file and tell
                 # the caller that we succeeded
    -            fpath.write_text("\n".join(new_lines), newline="\n")
    +            fpath.write_text('\n'.join(new_lines), newline='\n')
                 return True
    -        case _, "check":
    +        case _, 'check':
                 # File changes, and we are only checking. Print an error message and indicate failure to the caller
    -            print(f"File [{fpath}] contains improper #include directives", file=sys.stderr)
    +            print(f'File [{fpath}] contains improper #include directives', file=sys.stderr)
                 return False
     
     
    @@ -230,7 +229,7 @@ def fixup_one(p: Path) -> bool:
             try:
                 return _fixup_includes(p, mode=mode)
             except Exception as e:
    -            raise RuntimeError(f"Unexpected error while fixing-up the #includes on file [{p}] (See above)") from e
    +            raise RuntimeError(f'Unexpected error while fixing-up the #includes on file [{p}] (See above)') from e
     
         # First update the `#include` directives, since that can change the sort order
         # that clang-format might want to apply
    @@ -239,18 +238,18 @@ def fixup_one(p: Path) -> bool:
     
         # Whether we check for format violations or modify the files in-place
         match mode:
    -        case "apply":
    -            mode_args = ["-i"]
    -        case "check":
    -            mode_args = ["--dry-run", "-Werror"]
    +        case 'apply':
    +            mode_args = ['-i']
    +        case 'check':
    +            mode_args = ['--dry-run', '-Werror']
         cmd = [clang_format, *mode_args, *map(str, files)]
         try:
             res = subprocess.run(cmd, check=False, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
         except Exception as e:
    -        raise RuntimeError(f"Failed to spawn [{clang_format}] process for formatting files (See above)") from e
    +        raise RuntimeError(f'Failed to spawn [{clang_format}] process for formatting files (See above)') from e
         sys.stderr.buffer.write(res.stdout)
         return res.returncode == 0
     
     
    -if __name__ == "__main__":
    +if __name__ == '__main__':
         sys.exit(main(sys.argv[1:]))
    diff --git a/tools/paths.sh b/tools/paths.sh
    index 5e55532671e..a9a22f8cd2a 100755
    --- a/tools/paths.sh
    +++ b/tools/paths.sh
    @@ -28,89 +28,93 @@
     # Check for Cygpath, used by various commands. Better to check once than check every time.
     _HAVE_CYGPATH=false
     if have-command cygpath; then
    -    _HAVE_CYGPATH=true
    +  _HAVE_CYGPATH=true
     fi
     
     # Usage: native-path <path>
     native-path() {
    -    [[ "$#" -eq 1 ]] || fail "native_path expects exactly one argument"
    -    local arg=$1
    -    if $IS_WINDOWS; then
    -        $_HAVE_CYGPATH || fail "No 'cygpath' command is available, but we require it to normalize file paths on Windows."
    -        local ret
    -        ret="$(cygpath -w "$arg")"
    -        debug "Convert path [$arg] → [$ret]"
    -        printf %s "$ret"
    -    else
    -        printf %s "$arg"
    -    fi
    +  [[ "$#" -eq 1 ]] || fail "native_path expects exactly one argument"
    +  local arg=$1
    +  if $IS_WINDOWS; then
    +    $_HAVE_CYGPATH || fail "No 'cygpath' command is available, but we require it to normalize file paths on Windows."
    +    local ret
    +    ret="$(cygpath -w "$arg")"
    +    debug "Convert path [$arg] → [$ret]"
    +    printf %s "$ret"
    +  else
    +    printf %s "$arg"
    +  fi
     }
     
     # Usage: to_absolute <path>
     to_absolute() {
    -    [[ "$#" -eq 1 ]] || fail "to_absolute expects a single argument"
    -    local ret
    -    local arg="$1"
    -    debug "Resolve path [$arg]"
    -
    -    # Cygpath can resolve the path in a single subprocess:
    -    if $_HAVE_CYGPATH; then
    -        # Ask Cygpath to resolve the path. It knows how to do it reliably and quickly:
    -        ret=$(cygpath --absolute --mixed --long-name -- "$arg")
    -        debug "Cygpath resolved: [$arg]"
    -        printf %s "$ret"
    -        return 0
    -    fi
    -
    -    # If the given directory exists, we can ask the shell to resolve the path
    -    # by going there and asking the PWD:
    -    if is-dir "$arg"; then
    -        ret=$(run-chdir "$arg" pwd)
    -        debug "Resolved: [$arg]"
    -        printf %s "$ret"
    -        return 0
    -    fi
    -
    -    # Do it the "slow" way:
    -
    -    # The parent path:
    -    local _parent
    -    _parent="$(dirname "$arg")"
    -    # The filename part:
    -    local _fname
    -    _fname="$(basename "$arg")"
    -    # There are four cases to consider from dirname:
    -    if [[ $_parent = "." ]]; then  # The parent is '.' as in './foo'
    -        # Replace the leading '.' with the working directory
    -        _parent="$(pwd)"
    -    elif [[ $_parent = ".." ]]; then  # The parent is '..' as in '../foo'
    -        # Replace a leading '..' with the parent of the working directory
    -        _parent="$(dirname "$(pwd)")"
    -    elif [[ $arg == "$_parent" ]]; then  # The parent is itself, as in '/'
    -        # A root directory is its own parent according to 'dirname'
    -        printf %s "$arg"
    -        return 0
    -    else  # The parent is some other path, like 'foo' in 'foo/bar'
    -        # Resolve the parent path
    -        _parent="$(set +x; DEBUG=0 to_absolute "$_parent")"
    -    fi
    -    # At this point $_parent is an absolute path
    -    if [[ $_fname = ".." ]]; then
    -        # Strip one component
    -        ret="$(dirname "$_parent")"
    -    elif [[ $_fname = "." ]]; then
    -        # Drop a '.' at the end of a path
    -        ret="$_parent"
    -    else
    -        # Join the result
    -        ret="$_parent/$_fname"
    -    fi
    -    # Remove duplicate dir separators
    -    while [[ $ret =~ "//" ]]; do
    -        ret="${ret//\/\///}"
    -    done
    -    debug "Resolved path: [$arg] → [$ret]"
    +  [[ "$#" -eq 1 ]] || fail "to_absolute expects a single argument"
    +  local ret
    +  local arg="$1"
    +  debug "Resolve path [$arg]"
    +
    +  # Cygpath can resolve the path in a single subprocess:
    +  if $_HAVE_CYGPATH; then
    +    # Ask Cygpath to resolve the path. It knows how to do it reliably and quickly:
    +    ret=$(cygpath --absolute --mixed --long-name -- "$arg")
    +    debug "Cygpath resolved: [$arg]"
    +    printf %s "$ret"
    +    return 0
    +  fi
    +
    +  # If the given directory exists, we can ask the shell to resolve the path
    +  # by going there and asking the PWD:
    +  if is-dir "$arg"; then
    +    ret=$(run-chdir "$arg" pwd)
    +    debug "Resolved: [$arg]"
         printf %s "$ret"
    +    return 0
    +  fi
    +
    +  # Do it the "slow" way:
    +
    +  # The parent path:
    +  local _parent
    +  _parent="$(dirname "$arg")"
    +  # The filename part:
    +  local _fname
    +  _fname="$(basename "$arg")"
    +  # There are four cases to consider from dirname:
    +  if [[ $_parent = "." ]]; then # The parent is '.' as in './foo'
    +    # Replace the leading '.' with the working directory
    +    _parent="$(pwd)"
    +  elif [[ $_parent = ".." ]]; then # The parent is '..' as in '../foo'
    +    # Replace a leading '..' with the parent of the working directory
    +    _parent="$(dirname "$(pwd)")"
    +  elif [[ $arg == "$_parent" ]]; then # The parent is itself, as in '/'
    +    # A root directory is its own parent according to 'dirname'
    +    printf %s "$arg"
    +    return 0
    +  else
    +    # The parent is some other path, like 'foo' in 'foo/bar'
    +    # Resolve the parent path
    +    _parent="$(
    +      set +x
    +      DEBUG=0 to_absolute "$_parent"
    +    )"
    +  fi
    +  # At this point $_parent is an absolute path
    +  if [[ $_fname = ".." ]]; then
    +    # Strip one component
    +    ret="$(dirname "$_parent")"
    +  elif [[ $_fname = "." ]]; then
    +    # Drop a '.' at the end of a path
    +    ret="$_parent"
    +  else
    +    # Join the result
    +    ret="$_parent/$_fname"
    +  fi
    +  # Remove duplicate dir separators
    +  while [[ $ret =~ "//" ]]; do
    +    ret="${ret//\/\///}"
    +  done
    +  debug "Resolved path: [$arg] → [$ret]"
    +  printf %s "$ret"
     }
     
     # Get the TOOLS_DIR as a native absolute path. All other path vars are derived
    @@ -125,21 +129,21 @@ declare -r MONGOC_DIR=$MONGOC_DIR
     
     EXE_SUFFIX=""
     if $IS_WINDOWS; then
    -    EXE_SUFFIX=".exe"
    +  EXE_SUFFIX=".exe"
     fi
     declare -r EXE_SUFFIX=$EXE_SUFFIX
     
     if [[ "${USER_CACHES_DIR:=${XDG_CACHE_HOME:-}}" = "" ]]; then
    -    if $IS_DARWIN; then
    -        USER_CACHES_DIR=$HOME/Library/Caches
    -    elif $IS_UNIX_LIKE; then
    -        USER_CACHES_DIR=$HOME/.cache
    -    elif $IS_WINDOWS; then
    -        USER_CACHES_DIR=${LOCALAPPDATA:-$USERPROFILE/.cache}
    -    else
    -        log "Using ~/.cache as fallback user caching directory"
    -        USER_CACHES_DIR="$(to_absolute ~/.cache)"
    -    fi
    +  if $IS_DARWIN; then
    +    USER_CACHES_DIR=$HOME/Library/Caches
    +  elif $IS_UNIX_LIKE; then
    +    USER_CACHES_DIR=$HOME/.cache
    +  elif $IS_WINDOWS; then
    +    USER_CACHES_DIR=${LOCALAPPDATA:-$USERPROFILE/.cache}
    +  else
    +    log "Using ~/.cache as fallback user caching directory"
    +    USER_CACHES_DIR="$(to_absolute ~/.cache)"
    +  fi
     fi
     
     # Ensure we are dealing with a complete path
    @@ -150,12 +154,12 @@ declare -r USER_CACHES_DIR=$USER_CACHES_DIR
     : "${BUILD_CACHE_DIR:="$USER_CACHES_DIR/mongoc/build.$BUILD_CACHE_BUST"}"
     
     if is-main; then
    -    # Just print the paths that we detected
    -    log "Paths:"
    -    log " • USER_CACHES_DIR=[$USER_CACHES_DIR]"
    -    log " • BUILD_CACHE_DIR=[$BUILD_CACHE_DIR]"
    -    log " • BUILD_CACHE_BUST=[$BUILD_CACHE_BUST]"
    -    log " • EXE_SUFFIX=[$EXE_SUFFIX]"
    -    log " • TOOLS_DIR=[$TOOLS_DIR]"
    -    log " • MONGOC_DIR=[$MONGOC_DIR]"
    +  # Just print the paths that we detected
    +  log "Paths:"
    +  log " • USER_CACHES_DIR=[$USER_CACHES_DIR]"
    +  log " • BUILD_CACHE_DIR=[$BUILD_CACHE_DIR]"
    +  log " • BUILD_CACHE_BUST=[$BUILD_CACHE_BUST]"
    +  log " • EXE_SUFFIX=[$EXE_SUFFIX]"
    +  log " • TOOLS_DIR=[$TOOLS_DIR]"
    +  log " • MONGOC_DIR=[$MONGOC_DIR]"
     fi
    diff --git a/tools/platform.sh b/tools/platform.sh
    index e25a779646c..566d856def9 100755
    --- a/tools/platform.sh
    +++ b/tools/platform.sh
    @@ -21,31 +21,31 @@ _is_wsl=false
     _is_bsd=false
     _os_family=unknown
     case "$OSTYPE" in
    -    linux-*)
    -        if have-command cmd.exe; then
    -            _is_wsl=true
    -            _is_unix_like=true
    -            _os_family=windows
    -        else
    -            _is_linux=true
    -            _is_unix_like=true
    -            _os_family=linux
    -        fi
    -        ;;
    -    darwin*)
    -        _is_darwin=true
    -        _is_unix_like=true
    -        _os_family=darwin
    -        ;;
    -    FreeBSD|openbsd*|netbsd)
    -        _is_bsd=true
    -        _is_unix_like=true
    -        _os_family=bsd
    -        ;;
    -    msys*|cygwin*)
    -        _is_windows=true
    -        _os_family=windows
    -        ;;
    +linux-*)
    +  if have-command cmd.exe; then
    +    _is_wsl=true
    +    _is_unix_like=true
    +    _os_family=windows
    +  else
    +    _is_linux=true
    +    _is_unix_like=true
    +    _os_family=linux
    +  fi
    +  ;;
    +darwin*)
    +  _is_darwin=true
    +  _is_unix_like=true
    +  _os_family=darwin
    +  ;;
    +FreeBSD | openbsd* | netbsd)
    +  _is_bsd=true
    +  _is_unix_like=true
    +  _os_family=bsd
    +  ;;
    +msys* | cygwin*)
    +  _is_windows=true
    +  _os_family=windows
    +  ;;
     esac
     
     declare -r IS_DARWIN=$_is_darwin
    @@ -59,33 +59,33 @@ declare -r OS_FAMILY=$_os_family
     _is_redhat_based=false
     _is_debian_based=false
     if $IS_LINUX; then
    -    if is-file /etc/redhat-release; then
    -        _is_redhat_based=true
    -        _dist_version=$(sed 's|.*release \([^ ]\+\).*|\1|' < /etc/redhat-release)
    -    elif is-file /etc/debian_version; then
    -        _is_debian_based=true
    -        _dist_version=$(grep VERSION_ID /etc/os-release | sed 's|VERSION_ID="\(.*\)"|\1|')
    -    elif is-file /etc/alpine-release; then
    -        _is_alpine=true
    -        _dist_version=$(cat /etc/alpine-release)
    -    fi
    -    _dist_version=${_dist_version:-0}
    -    _major_version=${_dist_version/.*/}
    -    declare -r DIST_VERSION=$_dist_version
    -    declare -r DIST_MAJOR_VERSION=$_major_version
    +  if is-file /etc/redhat-release; then
    +    _is_redhat_based=true
+    _dist_version=$(sed 's|.*release \([^ ]\+\).*|\1|' </etc/redhat-release)
    +
    +include=(
    +  "${root_dir:?}/.evergreen"
    +  "${root_dir:?}/src"
    +  "${root_dir:?}/tools"
    +)
    +
    +exclude=(
    +  "${root_dir:?}/.evergreen/scripts/uv-installer.sh"
    +)
    +
    +mapfile -t files < <(find "${include[@]:?}" -name '*.sh' -type f | grep -v "${exclude[@]:?}")
    +
    +for file in "${files[@]:?}"; do
    +  uv run --frozen --group format-scripts shfmt -i 2 -w "${file:?}"
    +done
    diff --git a/tools/use.sh b/tools/use.sh
    index 633f1051669..dea7d5dc8b3 100755
    --- a/tools/use.sh
    +++ b/tools/use.sh
    @@ -37,46 +37,46 @@ declare -a _USE_IMPORTING
     _IS_MAIN=false
     
     for item in "$@"; do
    -    # Don't double-import items:
    -    _varname="_IMPORTED_$item"
    -    if [[ -n "${!_varname+n}" ]]; then
    -        continue
    -    fi
    -    # Push this item:
    -    _USE_IMPORTING+=("$item")
    -    # The file to be imported:
    -    file=$_this_dir/$item.sh
    -    ! [[ ${PRINT_DEBUG_LOGS:-} = 1 ]] || echo "Import: [$item]" 1>&2
    -    _err=0
    -    # Detect self-import:
    -    if printf '%s\0' "${BASH_SOURCE[@]}" | grep -qFxZ -- "$file"; then
    -        echo "File '$file' imports itself transitively" 1>&2
    -        _err=1
    -    fi
    -    # Detect non-existing imports:
    -    if ! [[ -f $file ]]; then
    -        echo "No script '$file' exists to import." 1>&2
    -        _err=1
    -    fi
    -    # Print the stacktrace of imports upon error:
    -    if [[ $_err -eq 1 ]]; then
    -        printf " • [%s] loaded by:\n" "${BASH_SOURCE[@]}" 1>&2
    -        echo " • (user)" 1>&2
    -        echo "Bailing out" 1>&2
    -        return 1
    -    fi
    -    # shellcheck disable=1090
    -    . "$file"
    -    # Recover item from the stack, since we may have recursed:
    -    item="${_USE_IMPORTING[${#_USE_IMPORTING[@]}-1]}"
    -    # Pop the top stack item:
    -    unset "_USE_IMPORTING[${#_USE_IMPORTING[@]}-1]"
    -    # Declare that the item has been imported, for future reference:
    -    declare "_IMPORTED_$item=1"
    -    ! [[ ${PRINT_DEBUG_LOGS:-} = 1 ]] || echo "Import: [$item] - done" 1>&2
    +  # Don't double-import items:
    +  _varname="_IMPORTED_$item"
    +  if [[ -n "${!_varname+n}" ]]; then
    +    continue
    +  fi
    +  # Push this item:
    +  _USE_IMPORTING+=("$item")
    +  # The file to be imported:
    +  file=$_this_dir/$item.sh
    +  ! [[ ${PRINT_DEBUG_LOGS:-} = 1 ]] || echo "Import: [$item]" 1>&2
    +  _err=0
    +  # Detect self-import:
    +  if printf '%s\0' "${BASH_SOURCE[@]}" | grep -qFxZ -- "$file"; then
    +    echo "File '$file' imports itself transitively" 1>&2
    +    _err=1
    +  fi
    +  # Detect non-existing imports:
    +  if ! [[ -f $file ]]; then
    +    echo "No script '$file' exists to import." 1>&2
    +    _err=1
    +  fi
    +  # Print the stacktrace of imports upon error:
    +  if [[ $_err -eq 1 ]]; then
    +    printf " • [%s] loaded by:\n" "${BASH_SOURCE[@]}" 1>&2
    +    echo " • (user)" 1>&2
    +    echo "Bailing out" 1>&2
    +    return 1
    +  fi
    +  # shellcheck disable=1090
    +  . "$file"
    +  # Recover item from the stack, since we may have recursed:
    +  item="${_USE_IMPORTING[${#_USE_IMPORTING[@]} - 1]}"
    +  # Pop the top stack item:
    +  unset "_USE_IMPORTING[${#_USE_IMPORTING[@]}-1]"
    +  # Declare that the item has been imported, for future reference:
    +  declare "_IMPORTED_$item=1"
    +  ! [[ ${PRINT_DEBUG_LOGS:-} = 1 ]] || echo "Import: [$item] - done" 1>&2
     done
     
     # Set _IS_MAIN to zero if the import stack is empty
     if [[ "${_USE_IMPORTING+${_USE_IMPORTING[*]}}" = "" ]]; then
    -    _IS_MAIN=true
    +  _IS_MAIN=true
     fi
    diff --git a/uv.lock b/uv.lock
    index a5a5c91febc..0695f610b90 100644
    --- a/uv.lock
    +++ b/uv.lock
    @@ -1,5 +1,5 @@
     version = 1
    -revision = 2
    +revision = 3
     requires-python = ">=3.10"
     resolution-markers = [
         "python_full_version >= '3.11'",
    @@ -47,85 +47,88 @@ wheels = [
     
     [[package]]
     name = "beautifulsoup4"
    -version = "4.13.4"
    +version = "4.13.5"
     source = { registry = "https://pypi.org/simple" }
     dependencies = [
         { name = "soupsieve" },
         { name = "typing-extensions" },
     ]
    -sdist = { url = "https://files.pythonhosted.org/packages/d8/e4/0c4c39e18fd76d6a628d4dd8da40543d136ce2d1752bd6eeeab0791f4d6b/beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195", size = 621067, upload-time = "2025-04-15T17:05:13.836Z" }
    +sdist = { url = "https://files.pythonhosted.org/packages/85/2e/3e5079847e653b1f6dc647aa24549d68c6addb4c595cc0d902d1b19308ad/beautifulsoup4-4.13.5.tar.gz", hash = "sha256:5e70131382930e7c3de33450a2f54a63d5e4b19386eab43a5b34d594268f3695", size = 622954, upload-time = "2025-08-24T14:06:13.168Z" }
     wheels = [
    -    { url = "https://files.pythonhosted.org/packages/50/cd/30110dc0ffcf3b131156077b90e9f60ed75711223f306da4db08eff8403b/beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b", size = 187285, upload-time = "2025-04-15T17:05:12.221Z" },
    +    { url = "https://files.pythonhosted.org/packages/04/eb/f4151e0c7377a6e08a38108609ba5cede57986802757848688aeedd1b9e8/beautifulsoup4-4.13.5-py3-none-any.whl", hash = "sha256:642085eaa22233aceadff9c69651bc51e8bf3f874fb6d7104ece2beb24b47c4a", size = 105113, upload-time = "2025-08-24T14:06:14.884Z" },
     ]
     
     [[package]]
     name = "certifi"
    -version = "2025.7.14"
    +version = "2025.8.3"
     source = { registry = "https://pypi.org/simple" }
    -sdist = { url = "https://files.pythonhosted.org/packages/b3/76/52c535bcebe74590f296d6c77c86dabf761c41980e1347a2422e4aa2ae41/certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995", size = 163981, upload-time = "2025-07-14T03:29:28.449Z" }
    +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" }
     wheels = [
    -    { url = "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", size = 162722, upload-time = "2025-07-14T03:29:26.863Z" },
    +    { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" },
     ]
     
     [[package]]
     name = "charset-normalizer"
    -version = "3.4.2"
    -source = { registry = "https://pypi.org/simple" }
    -sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" }
    -wheels = [
    -    { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" },
    -    { url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" },
    -    { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" },
    -    { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" },
    -    { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" },
    -    { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" },
    -    { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" },
    -    { url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" },
    -    { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" },
    -    { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" },
    -    { url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" },
    -    { url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" },
    -    { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, upload-time = "2025-05-02T08:32:10.46Z" },
    -    { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" },
    -    { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" },
    -    { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" },
    -    { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" },
    -    { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" },
    -    { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" },
    -    { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" },
    -    { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" },
    -    { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" },
    -    { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" },
    -    { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" },
    -    { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" },
    -    { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" },
    -    { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" },
    -    { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" },
    -    { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" },
    -    { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" },
    -    { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" },
    -    { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" },
    -    { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" },
    -    { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" },
    -    { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" },
    -    { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" },
    -    { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" },
    -    { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" },
    -    { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" },
    -    { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" },
    -    { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" },
    -    { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" },
    -    { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" },
    -    { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" },
    -    { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" },
    -    { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" },
    -    { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" },
    -    { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" },
    -    { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" },
    -    { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" },
    -    { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" },
    -    { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" },
    -    { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" },
    +version = "3.4.3"
    +source = { registry = "https://pypi.org/simple" }
    +sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" }
    +wheels = [
    +    { url = "https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" },
    +    { url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" },
    +    { url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" },
    +    { url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" },
    +    { url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, upload-time = "2025-08-09T07:55:43.262Z" },
    +    { url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" },
    +    { url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" },
    +    { url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" },
    +    { url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" },
    +    { url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" },
    +    { url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 107452, upload-time = "2025-08-09T07:55:51.461Z" },
    +    { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" },
    +    { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" },
    +    { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" },
    +    { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" },
    +    { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" },
    +    { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" },
    +    { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" },
    +    { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" },
    +    { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" },
    +    { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" },
    +    { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" },
    +    { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" },
    +    { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" },
    +    { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" },
    +    { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" },
    +    { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" },
    +    { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" },
    +    { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" },
    +    { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" },
    +    { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" },
    +    { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" },
    +    { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" },
    +    { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" },
    +    { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" },
    +    { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" },
    +    { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" },
    +    { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" },
    +    { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" },
    +    { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" },
    +    { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" },
    +    { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" },
    +    { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" },
    +    { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" },
    +    { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" },
    +    { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" },
    +    { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" },
    +    { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" },
    +    { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" },
    +    { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" },
    +    { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" },
    +    { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" },
    +    { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" },
    +    { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" },
    +    { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" },
    +    { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" },
     ]
     
     [[package]]
    @@ -320,6 +323,10 @@ evg = [
     format = [
         { name = "clang-format" },
     ]
    +format-scripts = [
    +    { name = "ruff" },
    +    { name = "shfmt-py" },
    +]
     
     [package.metadata]
     
    @@ -346,6 +353,10 @@ evg = [
         { name = "yamlloader", specifier = ">=1.5" },
     ]
     format = [{ name = "clang-format", specifier = "~=20.1.0" }]
    +format-scripts = [
    +    { name = "ruff", specifier = ">=0.13.0" },
    +    { name = "shfmt-py", specifier = ">=3.12.0.2" },
    +]
     
     [[package]]
     name = "packaging"
    @@ -525,7 +536,7 @@ wheels = [
     
     [[package]]
     name = "requests"
    -version = "2.32.4"
    +version = "2.32.5"
     source = { registry = "https://pypi.org/simple" }
     dependencies = [
         { name = "certifi" },
    @@ -533,9 +544,9 @@ dependencies = [
         { name = "idna" },
         { name = "urllib3" },
     ]
    -sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" }
    +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
     wheels = [
    -    { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" },
    +    { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
     ]
     
     [[package]]
    @@ -547,6 +558,38 @@ wheels = [
         { url = "https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742, upload-time = "2025-02-22T07:34:52.422Z" },
     ]
     
    +[[package]]
    +name = "ruff"
    +version = "0.13.0"
    +source = { registry = "https://pypi.org/simple" }
    +sdist = { url = "https://files.pythonhosted.org/packages/6e/1a/1f4b722862840295bcaba8c9e5261572347509548faaa99b2d57ee7bfe6a/ruff-0.13.0.tar.gz", hash = "sha256:5b4b1ee7eb35afae128ab94459b13b2baaed282b1fb0f472a73c82c996c8ae60", size = 5372863, upload-time = "2025-09-10T16:25:37.917Z" }
    +wheels = [
    +    { url = "https://files.pythonhosted.org/packages/ac/fe/6f87b419dbe166fd30a991390221f14c5b68946f389ea07913e1719741e0/ruff-0.13.0-py3-none-linux_armv6l.whl", hash = "sha256:137f3d65d58ee828ae136a12d1dc33d992773d8f7644bc6b82714570f31b2004", size = 12187826, upload-time = "2025-09-10T16:24:39.5Z" },
    +    { url = "https://files.pythonhosted.org/packages/e4/25/c92296b1fc36d2499e12b74a3fdb230f77af7bdf048fad7b0a62e94ed56a/ruff-0.13.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:21ae48151b66e71fd111b7d79f9ad358814ed58c339631450c66a4be33cc28b9", size = 12933428, upload-time = "2025-09-10T16:24:43.866Z" },
    +    { url = "https://files.pythonhosted.org/packages/44/cf/40bc7221a949470307d9c35b4ef5810c294e6cfa3caafb57d882731a9f42/ruff-0.13.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:64de45f4ca5441209e41742d527944635a05a6e7c05798904f39c85bafa819e3", size = 12095543, upload-time = "2025-09-10T16:24:46.638Z" },
    +    { url = "https://files.pythonhosted.org/packages/f1/03/8b5ff2a211efb68c63a1d03d157e924997ada87d01bebffbd13a0f3fcdeb/ruff-0.13.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2c653ae9b9d46e0ef62fc6fbf5b979bda20a0b1d2b22f8f7eb0cde9f4963b8", size = 12312489, upload-time = "2025-09-10T16:24:49.556Z" },
    +    { url = "https://files.pythonhosted.org/packages/37/fc/2336ef6d5e9c8d8ea8305c5f91e767d795cd4fc171a6d97ef38a5302dadc/ruff-0.13.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cec632534332062bc9eb5884a267b689085a1afea9801bf94e3ba7498a2d207", size = 11991631, upload-time = "2025-09-10T16:24:53.439Z" },
    +    { url = "https://files.pythonhosted.org/packages/39/7f/f6d574d100fca83d32637d7f5541bea2f5e473c40020bbc7fc4a4d5b7294/ruff-0.13.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd628101d9f7d122e120ac7c17e0a0f468b19bc925501dbe03c1cb7f5415b24", size = 13720602, upload-time = "2025-09-10T16:24:56.392Z" },
    +    { url = "https://files.pythonhosted.org/packages/fd/c8/a8a5b81d8729b5d1f663348d11e2a9d65a7a9bd3c399763b1a51c72be1ce/ruff-0.13.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afe37db8e1466acb173bb2a39ca92df00570e0fd7c94c72d87b51b21bb63efea", size = 14697751, upload-time = "2025-09-10T16:24:59.89Z" },
    +    { url = "https://files.pythonhosted.org/packages/57/f5/183ec292272ce7ec5e882aea74937f7288e88ecb500198b832c24debc6d3/ruff-0.13.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f96a8d90bb258d7d3358b372905fe7333aaacf6c39e2408b9f8ba181f4b6ef2", size = 14095317, upload-time = "2025-09-10T16:25:03.025Z" },
    +    { url = "https://files.pythonhosted.org/packages/9f/8d/7f9771c971724701af7926c14dab31754e7b303d127b0d3f01116faef456/ruff-0.13.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b5e3d883e4f924c5298e3f2ee0f3085819c14f68d1e5b6715597681433f153", size = 13144418, upload-time = "2025-09-10T16:25:06.272Z" },
    +    { url = "https://files.pythonhosted.org/packages/a8/a6/7985ad1778e60922d4bef546688cd8a25822c58873e9ff30189cfe5dc4ab/ruff-0.13.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03447f3d18479df3d24917a92d768a89f873a7181a064858ea90a804a7538991", size = 13370843, upload-time = "2025-09-10T16:25:09.965Z" },
    +    { url = "https://files.pythonhosted.org/packages/64/1c/bafdd5a7a05a50cc51d9f5711da704942d8dd62df3d8c70c311e98ce9f8a/ruff-0.13.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:fbc6b1934eb1c0033da427c805e27d164bb713f8e273a024a7e86176d7f462cf", size = 13321891, upload-time = "2025-09-10T16:25:12.969Z" },
    +    { url = "https://files.pythonhosted.org/packages/bc/3e/7817f989cb9725ef7e8d2cee74186bf90555279e119de50c750c4b7a72fe/ruff-0.13.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8ab6a3e03665d39d4a25ee199d207a488724f022db0e1fe4002968abdb8001b", size = 12119119, upload-time = "2025-09-10T16:25:16.621Z" },
    +    { url = "https://files.pythonhosted.org/packages/58/07/9df080742e8d1080e60c426dce6e96a8faf9a371e2ce22eef662e3839c95/ruff-0.13.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2a5c62f8ccc6dd2fe259917482de7275cecc86141ee10432727c4816235bc41", size = 11961594, upload-time = "2025-09-10T16:25:19.49Z" },
    +    { url = "https://files.pythonhosted.org/packages/6a/f4/ae1185349197d26a2316840cb4d6c3fba61d4ac36ed728bf0228b222d71f/ruff-0.13.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b7b85ca27aeeb1ab421bc787009831cffe6048faae08ad80867edab9f2760945", size = 12933377, upload-time = "2025-09-10T16:25:22.371Z" },
    +    { url = "https://files.pythonhosted.org/packages/b6/39/e776c10a3b349fc8209a905bfb327831d7516f6058339a613a8d2aaecacd/ruff-0.13.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:79ea0c44a3032af768cabfd9616e44c24303af49d633b43e3a5096e009ebe823", size = 13418555, upload-time = "2025-09-10T16:25:25.681Z" },
    +    { url = "https://files.pythonhosted.org/packages/46/09/dca8df3d48e8b3f4202bf20b1658898e74b6442ac835bfe2c1816d926697/ruff-0.13.0-py3-none-win32.whl", hash = "sha256:4e473e8f0e6a04e4113f2e1de12a5039579892329ecc49958424e5568ef4f768", size = 12141613, upload-time = "2025-09-10T16:25:28.664Z" },
    +    { url = "https://files.pythonhosted.org/packages/61/21/0647eb71ed99b888ad50e44d8ec65d7148babc0e242d531a499a0bbcda5f/ruff-0.13.0-py3-none-win_amd64.whl", hash = "sha256:48e5c25c7a3713eea9ce755995767f4dcd1b0b9599b638b12946e892123d1efb", size = 13258250, upload-time = "2025-09-10T16:25:31.773Z" },
    +    { url = "https://files.pythonhosted.org/packages/e1/a3/03216a6a86c706df54422612981fb0f9041dbb452c3401501d4a22b942c9/ruff-0.13.0-py3-none-win_arm64.whl", hash = "sha256:ab80525317b1e1d38614addec8ac954f1b3e662de9d59114ecbf771d00cf613e", size = 12312357, upload-time = "2025-09-10T16:25:35.595Z" },
    +]
    +
    +[[package]]
    +name = "shfmt-py"
    +version = "3.12.0.2"
    +source = { registry = "https://pypi.org/simple" }
    +sdist = { url = "https://files.pythonhosted.org/packages/af/d9/a1ea26749bd19467e9fbfe7b34e6c8df517e01de4028a45b954eebe8c03b/shfmt_py-3.12.0.2.tar.gz", hash = "sha256:6a0dc675b37d000eb236609cf15aedd9e7a538927ea02c57b617908b6f237e9c", size = 4467, upload-time = "2025-07-08T06:54:40.396Z" }
    +
     [[package]]
     name = "shrub-py"
     version = "3.10.0"
    @@ -582,11 +625,11 @@ wheels = [
     
     [[package]]
     name = "soupsieve"
    -version = "2.7"
    +version = "2.8"
     source = { registry = "https://pypi.org/simple" }
    -sdist = { url = "https://files.pythonhosted.org/packages/3f/f4/4a80cd6ef364b2e8b65b15816a843c0980f7a5a2b4dc701fc574952aa19f/soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a", size = 103418, upload-time = "2025-04-20T18:50:08.518Z" }
    +sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472, upload-time = "2025-08-27T15:39:51.78Z" }
     wheels = [
    -    { url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677, upload-time = "2025-04-20T18:50:07.196Z" },
    +    { url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" },
     ]
     
     [[package]]
    @@ -772,11 +815,11 @@ wheels = [
     
     [[package]]
     name = "typing-extensions"
    -version = "4.14.1"
    +version = "4.15.0"
     source = { registry = "https://pypi.org/simple" }
    -sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" }
    +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
     wheels = [
    -    { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" },
    +    { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
     ]
     
     [[package]]
    @@ -802,9 +845,12 @@ wheels = [
     
     [[package]]
     name = "yamlloader"
    -version = "1.5.1"
    +version = "1.5.2"
     source = { registry = "https://pypi.org/simple" }
    -sdist = { url = "https://files.pythonhosted.org/packages/6f/6b/7f094b4d9009794cb69dea18c99bdee4e62b4e0ee69bb19128178191f18a/yamlloader-1.5.1.tar.gz", hash = "sha256:8dece19b050acb1c6a8ca14aa30793388f9be154f734b826541f9a1828d41cec", size = 77157, upload-time = "2025-01-01T17:31:36.453Z" }
    +dependencies = [
    +    { name = "pyyaml" },
    +]
    +sdist = { url = "https://files.pythonhosted.org/packages/b1/ab/a0cdfe8081e0fb9dd15372f8ccb257541d197450f6047a366fef597bb1bf/yamlloader-1.5.2.tar.gz", hash = "sha256:c10ac1321a626b1ca8b09d3f3afe9855b818391c59992a76b65e4a2d95eac41b", size = 77246, upload-time = "2025-08-05T13:23:47.62Z" }
     wheels = [
    -    { url = "https://files.pythonhosted.org/packages/68/0f/28323a9c418403cd4080d1736873c354bf058ad34306be5ff58639dcaedd/yamlloader-1.5.1-py3-none-any.whl", hash = "sha256:610014b14e25d7328d69f6526524d4616a552561f4c1b919f1282695bc1f4a11", size = 7684, upload-time = "2025-01-01T17:31:33.627Z" },
    +    { url = "https://files.pythonhosted.org/packages/c2/a3/bdf1a1dd0ddb658332dbe53194624c668c7935b5c60b2d36dae4018053ec/yamlloader-1.5.2-py3-none-any.whl", hash = "sha256:5855d320d55d55f4309f69798b7cd6f55f70739a0f3d4bbe78aeb42c204c2d81", size = 7774, upload-time = "2025-08-05T13:23:46.455Z" },
     ]