diff --git a/airbyte_cdk/connector_builder/main.py b/airbyte_cdk/connector_builder/main.py
index ad2d6650f..80cf4afa9 100644
--- a/airbyte_cdk/connector_builder/main.py
+++ b/airbyte_cdk/connector_builder/main.py
@@ -78,9 +78,9 @@ def handle_connector_builder_request(
     if command == "resolve_manifest":
         return resolve_manifest(source)
     elif command == "test_read":
-        assert (
-            catalog is not None
-        ), "`test_read` requires a valid `ConfiguredAirbyteCatalog`, got None."
+        assert catalog is not None, (
+            "`test_read` requires a valid `ConfiguredAirbyteCatalog`, got None."
+        )
         return read_stream(source, config, catalog, state, limits)
     elif command == "full_resolve_manifest":
         return full_resolve_manifest(source, limits)
diff --git a/airbyte_cdk/sources/concurrent_source/concurrent_source.py b/airbyte_cdk/sources/concurrent_source/concurrent_source.py
index bc7d97cdd..ffdee2dc1 100644
--- a/airbyte_cdk/sources/concurrent_source/concurrent_source.py
+++ b/airbyte_cdk/sources/concurrent_source/concurrent_source.py
@@ -49,9 +49,9 @@ def create(
         too_many_generator = (
             not is_single_threaded and initial_number_of_partitions_to_generate >= num_workers
         )
-        assert (
-            not too_many_generator
-        ), "It is required to have more workers than threads generating partitions"
+        assert not too_many_generator, (
+            "It is required to have more workers than threads generating partitions"
+        )
         threadpool = ThreadPoolManager(
             concurrent.futures.ThreadPoolExecutor(
                 max_workers=num_workers, thread_name_prefix="workerpool"
diff --git a/airbyte_cdk/sources/file_based/file_based_source.py b/airbyte_cdk/sources/file_based/file_based_source.py
index 17a7ee957..2d34fe5dc 100644
--- a/airbyte_cdk/sources/file_based/file_based_source.py
+++ b/airbyte_cdk/sources/file_based/file_based_source.py
@@ -282,9 +282,9 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]:
                     and hasattr(self, "_concurrency_level")
                     and self._concurrency_level is not None
                 ):
-                    assert (
-                        state_manager is not None
-                    ), "No ConnectorStateManager was created, but it is required for incremental syncs. This is unexpected. Please contact Support."
+                    assert state_manager is not None, (
+                        "No ConnectorStateManager was created, but it is required for incremental syncs. This is unexpected. Please contact Support."
+                    )
 
                     cursor = self.cursor_cls(
                         stream_config,
diff --git a/airbyte_cdk/sources/file_based/file_types/avro_parser.py b/airbyte_cdk/sources/file_based/file_types/avro_parser.py
index e1aa2c4cb..85e5afa22 100644
--- a/airbyte_cdk/sources/file_based/file_types/avro_parser.py
+++ b/airbyte_cdk/sources/file_based/file_types/avro_parser.py
@@ -154,7 +154,7 @@ def _convert_avro_type_to_json(
                 # For example: ^-?\d{1,5}(?:\.\d{1,3})?$ would accept 12345.123 and 123456.12345 would  be rejected
                 return {
                     "type": "string",
-                    "pattern": f"^-?\\d{{{1,max_whole_number_range}}}(?:\\.\\d{1,decimal_range})?$",
+                    "pattern": f"^-?\\d{{1,{max_whole_number_range}}}(?:\\.\\d{{1,{decimal_range}}})?$",
                 }
             elif "logicalType" in avro_field:
                 if avro_field["logicalType"] not in AVRO_LOGICAL_TYPE_TO_JSON:
diff --git a/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py b/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py
index fb0efc82c..f02602d58 100644
--- a/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py
+++ b/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py
@@ -284,9 +284,9 @@ def read(self) -> Iterable[Record]:
     def to_slice(self) -> Optional[Mapping[str, Any]]:
         if self._slice is None:
             return None
-        assert (
-            len(self._slice["files"]) == 1
-        ), f"Expected 1 file per partition but got {len(self._slice['files'])} for stream {self.stream_name()}"
+        assert len(self._slice["files"]) == 1, (
+            f"Expected 1 file per partition but got {len(self._slice['files'])} for stream {self.stream_name()}"
+        )
         file = self._slice["files"][0]
         return {"files": [file]}
 
diff --git a/airbyte_cdk/sql/shared/sql_processor.py b/airbyte_cdk/sql/shared/sql_processor.py
index 5fd0a5e46..a53925206 100644
--- a/airbyte_cdk/sql/shared/sql_processor.py
+++ b/airbyte_cdk/sql/shared/sql_processor.py
@@ -326,9 +326,9 @@ def _ensure_schema_exists(
 
         if DEBUG_MODE:
             found_schemas = schemas_list
-            assert (
-                schema_name in found_schemas
-            ), f"Schema {schema_name} was not created. Found: {found_schemas}"
+            assert schema_name in found_schemas, (
+                f"Schema {schema_name} was not created. Found: {found_schemas}"
+            )
 
     def _quote_identifier(self, identifier: str) -> str:
         """Return the given identifier, quoted."""
@@ -617,10 +617,10 @@ def _append_temp_table_to_final_table(
         self._execute_sql(
             f"""
             INSERT INTO {self._fully_qualified(final_table_name)} (
-            {f',{nl}  '.join(columns)}
+            {f",{nl}  ".join(columns)}
             )
             SELECT
-            {f',{nl}  '.join(columns)}
+            {f",{nl}  ".join(columns)}
             FROM {self._fully_qualified(temp_table_name)}
             """,
         )
@@ -645,8 +645,7 @@ def _swap_temp_table_with_final_table(
         deletion_name = f"{final_table_name}_deleteme"
         commands = "\n".join(
             [
-                f"ALTER TABLE {self._fully_qualified(final_table_name)} RENAME "
-                f"TO {deletion_name};",
+                f"ALTER TABLE {self._fully_qualified(final_table_name)} RENAME TO {deletion_name};",
                 f"ALTER TABLE {self._fully_qualified(temp_table_name)} RENAME "
                 f"TO {final_table_name};",
                 f"DROP TABLE {self._fully_qualified(deletion_name)};",
@@ -686,10 +685,10 @@ def _merge_temp_table_to_final_table(
                 {set_clause}
             WHEN NOT MATCHED THEN INSERT
             (
-                {f',{nl}    '.join(columns)}
+                {f",{nl}    ".join(columns)}
             )
             VALUES (
-                tmp.{f',{nl}    tmp.'.join(columns)}
+                tmp.{f",{nl}    tmp.".join(columns)}
             );
             """,
         )
diff --git a/airbyte_cdk/test/declarative/__init__.py b/airbyte_cdk/test/declarative/__init__.py
new file mode 100644
index 000000000..28ce32c46
--- /dev/null
+++ b/airbyte_cdk/test/declarative/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+"""Declarative tests framework.
+
+This module provides fixtures and utilities for testing Airbyte sources and destinations
+in a declarative way.
+"""
diff --git a/airbyte_cdk/test/declarative/models/__init__.py b/airbyte_cdk/test/declarative/models/__init__.py
new file mode 100644
index 000000000..446270fb8
--- /dev/null
+++ b/airbyte_cdk/test/declarative/models/__init__.py
@@ -0,0 +1,7 @@
+from airbyte_cdk.test.declarative.models.scenario import (
+    ConnectorTestScenario,
+)
+
+__all__ = [
+    "ConnectorTestScenario",
+]
diff --git a/airbyte_cdk/test/declarative/models/scenario.py b/airbyte_cdk/test/declarative/models/scenario.py
new file mode 100644
index 000000000..b8f0c3dbb
--- /dev/null
+++ b/airbyte_cdk/test/declarative/models/scenario.py
@@ -0,0 +1,74 @@
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+"""Run acceptance tests in PyTest.
+
+These tests leverage the same `acceptance-test-config.yml` configuration files as the
+acceptance tests in CAT, but they run in PyTest instead of CAT. This allows us to run
+the acceptance tests in the same local environment as we are developing in, speeding
+up iteration cycles.
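+
+For example, a scenario can be validated straight from an entry in the
+`acceptance_tests` section of `acceptance-test-config.yml` (a minimal sketch;
+the `secrets/config.json` path is illustrative):
+
+```python
+scenario = ConnectorTestScenario.model_validate(
+    {"config_path": "secrets/config.json", "status": "succeed"}
+)
+assert not scenario.expect_exception
+```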
+"""
+
+from __future__ import annotations
+
+from pathlib import Path
+from typing import Any, Literal, cast
+
+import yaml
+from pydantic import BaseModel
+
+
+class ConnectorTestScenario(BaseModel):
+    """Acceptance test instance, as a Pydantic model.
+
+    This class represents an acceptance test instance, which is a single test case
+    that can be run against a connector. It is used to deserialize and validate the
+    acceptance test configuration file.
+    """
+
+    class AcceptanceTestExpectRecords(BaseModel):
+        path: Path
+        exact_order: bool = False
+
+    class AcceptanceTestFileTypes(BaseModel):
+        skip_test: bool
+        bypass_reason: str
+
+    config_path: Path | None = None
+    config_dict: dict[str, Any] | None = None
+
+    id: str | None = None
+
+    configured_catalog_path: Path | None = None
+    timeout_seconds: int | None = None
+    expect_records: AcceptanceTestExpectRecords | None = None
+    file_types: AcceptanceTestFileTypes | None = None
+    status: Literal["succeed", "failed"] | None = None
+
+    def get_config_dict(self) -> dict[str, Any]:
+        """Return the config dictionary.
+
+        If a config dictionary has already been loaded, return it. Otherwise, load
+        the config file and return the dictionary.
+        """
+        if self.config_dict:
+            return self.config_dict
+
+        if self.config_path:
+            return cast(dict[str, Any], yaml.safe_load(self.config_path.read_text()))
+
+        raise ValueError("No config dictionary or path provided.")
+
+    @property
+    def expect_exception(self) -> bool:
+        return self.status == "failed"
+
+    @property
+    def instance_name(self) -> str:
+        return self.config_path.stem if self.config_path else "Unnamed Scenario"
+
+    def __str__(self) -> str:
+        if self.id:
+            return f"'{self.id}' Test Scenario"
+        if self.config_path:
+            return f"'{self.config_path.name}' Test Scenario"
+
+        return f"'{hash(self)}' Test Scenario"
diff --git a/airbyte_cdk/test/declarative/test_suites/__init__.py b/airbyte_cdk/test/declarative/test_suites/__init__.py
new file mode 100644
index 000000000..c6736d62e
--- /dev/null
+++ b/airbyte_cdk/test/declarative/test_suites/__init__.py
@@ -0,0 +1,25 @@
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+"""Declarative test suites.
+
+Here we have base classes for a robust set of declarative connector test suites.
+"""
+
+from airbyte_cdk.test.declarative.test_suites.connector_base import (
+    ConnectorTestScenario,
+    ConnectorTestSuiteBase,
+    generate_tests,
+)
+from airbyte_cdk.test.declarative.test_suites.declarative_sources import (
+    DeclarativeSourceTestSuite,
+)
+from airbyte_cdk.test.declarative.test_suites.destination_base import DestinationTestSuiteBase
+from airbyte_cdk.test.declarative.test_suites.source_base import SourceTestSuiteBase
+
+__all__ = [
+    "ConnectorTestScenario",
+    "ConnectorTestSuiteBase",
+    "DeclarativeSourceTestSuite",
+    "DestinationTestSuiteBase",
+    "SourceTestSuiteBase",
+    "generate_tests",
+]
diff --git a/airbyte_cdk/test/declarative/test_suites/connector_base.py b/airbyte_cdk/test/declarative/test_suites/connector_base.py
new file mode 100644
index 000000000..f89c89f57
--- /dev/null
+++ b/airbyte_cdk/test/declarative/test_suites/connector_base.py
@@ -0,0 +1,223 @@
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+"""Base class for connector test suites."""
+
+from __future__ import annotations
+
+import abc
+import inspect
+import sys
+from collections.abc import Callable
+from pathlib import Path
+from typing import Any, Literal, cast
+
+import pytest
+import yaml
+from boltons.typeutils import classproperty
+
+from airbyte_cdk import Connector
+from airbyte_cdk.models import (
+    AirbyteMessage,
+    Type,
+)
+from airbyte_cdk.test import entrypoint_wrapper
+from airbyte_cdk.test.declarative.models import (
+    ConnectorTestScenario,
+)
+from airbyte_cdk.test.declarative.utils.job_runner import IConnector, run_test_job
+
+ACCEPTANCE_TEST_CONFIG = "acceptance-test-config.yml"
+MANIFEST_YAML = "manifest.yaml"
+
+
+class JavaClass(str):
+    """A string that represents a Java class."""
+
+
+class DockerImage(str):
+    """A string that represents a Docker image."""
+
+
+class RunnableConnector(abc.ABC):
+    """A connector that can be run in a test scenario."""
+
+    @abc.abstractmethod
+    def launch(self, args: list[str] | None) -> None: ...
+
+
+def generate_tests(metafunc: pytest.Metafunc) -> None:
+    """
+    A helper for the pytest_generate_tests hook.
+
+    If a test method (in a class subclassed from our base class)
+    declares an argument 'instance', this function calls the test
+    class's `get_scenarios()` classmethod and parametrizes that
+    test with the returned scenarios.
+
+    ## Usage
+
+    ```python
+    from airbyte_cdk.test.declarative.test_suites.connector_base import (
+        generate_tests,
+        ConnectorTestSuiteBase,
+    )
+
+    def pytest_generate_tests(metafunc):
+        generate_tests(metafunc)
+
+    class TestMyConnector(ConnectorTestSuiteBase):
+        ...
+
+    ```
+    """
+    # Check if the test function requires an 'instance' argument
+    if "instance" in metafunc.fixturenames:
+        # Retrieve the test class
+        test_class = metafunc.cls
+        if test_class is None:
+            raise ValueError("Expected a class here.")
+        # Get the 'get_scenarios' classmethod from the class
+        scenarios_attr = getattr(test_class, "get_scenarios", None)
+        if scenarios_attr is None:
+            raise ValueError(
+                f"Test class {test_class} does not have a 'get_scenarios' classmethod. "
+                "Please define 'get_scenarios' in the test class."
+            )
+
+        scenarios = test_class.get_scenarios()
+        ids = [str(scenario) for scenario in scenarios]
+        metafunc.parametrize("instance", scenarios, ids=ids)
+
+
+class ConnectorTestSuiteBase(abc.ABC):
+    """Base class for connector test suites."""
+
+    connector: type[IConnector] | Callable[[], IConnector] | None = None
+    """The connector class or a factory function that returns an instance of IConnector."""
+
+    @classmethod
+    def get_test_class_dir(cls) -> Path:
+        """Get the file path that contains the class."""
+        module = sys.modules[cls.__module__]
+        # Get the directory containing the test file
+        return Path(inspect.getfile(module)).parent
+
+    @classmethod
+    def create_connector(
+        cls,
+        scenario: ConnectorTestScenario,
+    ) -> IConnector:
+        """Instantiate the connector class."""
+        connector = cls.connector  # type: ignore
+        if connector:
+            if callable(connector) or isinstance(connector, type):
+                # If the connector is a class or factory function, instantiate it:
+                return cast(IConnector, connector())  # type: ignore [redundant-cast]
+
+        # Otherwise, we can't instantiate the connector. Fail with a clear error message.
+        raise NotImplementedError(
+            "No connector class or connector factory function provided. "
+            "Please provide a class or factory function in `cls.connector`, or "
+            "override `cls.create_connector()` to define a custom initialization process."
+        )
+
+    def run_test_scenario(
+        self,
+        verb: Literal["read", "check", "discover"],
+        test_scenario: ConnectorTestScenario,
+        *,
+        catalog: dict[str, Any] | None = None,
+    ) -> entrypoint_wrapper.EntrypointOutput:
+        """Run a test job from provided CLI args and return the result."""
+        return run_test_job(
+            self.create_connector(test_scenario),
+            verb,
+            test_instance=test_scenario,
+            catalog=catalog,
+        )
+
+    # Test Definitions
+
+    def test_check(
+        self,
+        instance: ConnectorTestScenario,
+    ) -> None:
+        """Run `connection` acceptance tests."""
+        result = self.run_test_scenario(
+            "check",
+            test_scenario=instance,
+        )
+        conn_status_messages: list[AirbyteMessage] = [
+            msg for msg in result._messages if msg.type == Type.CONNECTION_STATUS
+        ]  # noqa: SLF001  # Non-public API
+        assert len(conn_status_messages) == 1, (
+            f"Expected exactly one CONNECTION_STATUS message. Got: {result._messages}"
+        )
+
+    @classmethod
+    def get_connector_root_dir(cls) -> Path:
+        """Get the root directory of the connector."""
+        for parent in cls.get_test_class_dir().parents:
+            if (parent / MANIFEST_YAML).exists():
+                return parent
+            if (parent / ACCEPTANCE_TEST_CONFIG).exists():
+                return parent
+            if parent.name == "airbyte_cdk":
+                break
+        # If we reach here, we didn't find the marker files in any parent of the test dir.
+        # Fall back to searching the parents of the current working directory.
+        for parent in Path.cwd().parents:
+            if (parent / MANIFEST_YAML).exists():
+                return parent
+            if (parent / ACCEPTANCE_TEST_CONFIG).exists():
+                return parent
+            if parent.name == "airbyte_cdk":
+                break
+
+        raise FileNotFoundError(
+            "Could not find connector root directory relative to "
+            f"'{str(cls.get_test_class_dir())}' or '{str(Path.cwd())}'."
+        )
+
+    @classproperty
+    def acceptance_test_config_path(cls) -> Path:
+        """Get the path to the acceptance test config file."""
+        result = cls.get_connector_root_dir() / ACCEPTANCE_TEST_CONFIG
+        if result.exists():
+            return result
+
+        raise FileNotFoundError(f"Acceptance test config file not found at: {str(result)}")
+
+    @classmethod
+    def get_scenarios(
+        cls,
+    ) -> list[ConnectorTestScenario]:
+        """Get acceptance tests for a given category.
+
+        This has to be a separate function because pytest does not allow
+        parametrization of fixtures with arguments from the test class itself.
+        """
+        category = "connection"
+        all_tests_config = yaml.safe_load(cls.acceptance_test_config_path.read_text())
+        if "acceptance_tests" not in all_tests_config:
+            raise ValueError(
+                f"Acceptance tests config not found in {cls.acceptance_test_config_path}."
+                f" Found only: {str(all_tests_config)}."
+            )
+        if category not in all_tests_config["acceptance_tests"]:
+            return []
+        if "tests" not in all_tests_config["acceptance_tests"][category]:
+            raise ValueError(f"No tests found for category {category}")
+
+        tests_scenarios = [
+            ConnectorTestScenario.model_validate(test)
+            for test in all_tests_config["acceptance_tests"][category]["tests"]
+            if "iam_role" not in test["config_path"]
+        ]
+        connector_root = cls.get_connector_root_dir().absolute()
+        for test in tests_scenarios:
+            if test.config_path:
+                test.config_path = connector_root / test.config_path
+            if test.configured_catalog_path:
+                test.configured_catalog_path = connector_root / test.configured_catalog_path
+
+        return tests_scenarios
diff --git a/airbyte_cdk/test/declarative/test_suites/declarative_sources.py b/airbyte_cdk/test/declarative/test_suites/declarative_sources.py
new file mode 100644
index 000000000..c167ca5d1
--- /dev/null
+++ b/airbyte_cdk/test/declarative/test_suites/declarative_sources.py
@@ -0,0 +1,74 @@
+import os
+from hashlib import md5
+from pathlib import Path
+from typing import Any, cast
+
+import yaml
+from boltons.typeutils import classproperty
+
+from airbyte_cdk.sources.declarative.concurrent_declarative_source import (
+    ConcurrentDeclarativeSource,
+)
+from airbyte_cdk.test.declarative.models import ConnectorTestScenario
+from airbyte_cdk.test.declarative.test_suites.connector_base import MANIFEST_YAML
+from airbyte_cdk.test.declarative.test_suites.source_base import (
+    SourceTestSuiteBase,
+)
+from airbyte_cdk.test.declarative.utils.job_runner import IConnector
+
+
+def md5_checksum(file_path: Path) -> str:
+    with open(file_path, "rb") as file:
+        return md5(file.read()).hexdigest()
+
+
+class DeclarativeSourceTestSuite(SourceTestSuiteBase):
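+    """Declarative (manifest-based) source test suite.
+
+    This suite locates `manifest.yaml` (and, if present, `components.py`) in the
+    connector root directory and builds a `ConcurrentDeclarativeSource` from them,
+    injecting the custom components code into the config. See
+    `unit_tests/resources/source_pokeapi_w_components_py` for a working example.
+    """
+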
+    @classproperty
+    def manifest_yaml_path(cls) -> Path:
+        """Get the path to the manifest.yaml file."""
+        result = cls.get_connector_root_dir() / MANIFEST_YAML
+        if result.exists():
+            return result
+
+        raise FileNotFoundError(
+            f"Manifest YAML file not found at {result}. "
+            "Please ensure that the test suite is run in the correct directory.",
+        )
+
+    @classproperty
+    def components_py_path(cls) -> Path | None:
+        """Get the path to the components.py file."""
+        result = cls.get_connector_root_dir() / "components.py"
+        if result.exists():
+            return result
+
+        return None
+
+    @classmethod
+    def create_connector(
+        cls,
+        scenario: ConnectorTestScenario,
+    ) -> IConnector:
+        """Create a connector instance for the test suite."""
+        config: dict[str, Any] = scenario.get_config_dict()
+        # catalog = scenario.get_catalog()
+        # state = scenario.get_state()
+        # source_config = scenario.get_source_config()
+
+        manifest_dict = yaml.safe_load(cls.manifest_yaml_path.read_text())
+        if cls.components_py_path and cls.components_py_path.exists():
+            os.environ["AIRBYTE_ENABLE_UNSAFE_CODE"] = "true"
+            config["__injected_components_py"] = cls.components_py_path.read_text()
+            config["__injected_components_py_checksums"] = {
+                "md5": md5_checksum(cls.components_py_path),
+            }
+
+        return cast(
+            IConnector,
+            ConcurrentDeclarativeSource(
+                config=config,
+                catalog=None,
+                state=None,
+                source_config=manifest_dict,
+            ),
+        )
diff --git a/airbyte_cdk/test/declarative/test_suites/destination_base.py b/airbyte_cdk/test/declarative/test_suites/destination_base.py
new file mode 100644
index 000000000..4cd49624c
--- /dev/null
+++ b/airbyte_cdk/test/declarative/test_suites/destination_base.py
@@ -0,0 +1,12 @@
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+"""Base class for destination test suites."""
+
+from airbyte_cdk.test.declarative.test_suites.connector_base import ConnectorTestSuiteBase
+
+
+class DestinationTestSuiteBase(ConnectorTestSuiteBase):
+    """Base class for destination test suites.
+
+    This class provides a base set of functionality for testing destination connectors, and it
+    inherits all generic connector tests from the `ConnectorTestSuiteBase` class.
+    """
diff --git a/airbyte_cdk/test/declarative/test_suites/source_base.py b/airbyte_cdk/test/declarative/test_suites/source_base.py
new file mode 100644
index 000000000..ed7d7366c
--- /dev/null
+++ b/airbyte_cdk/test/declarative/test_suites/source_base.py
@@ -0,0 +1,128 @@
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+"""Base class for source test suites."""
+
+from dataclasses import asdict
+
+from airbyte_cdk.models import (
+    AirbyteMessage,
+    AirbyteStream,
+    ConfiguredAirbyteCatalog,
+    ConfiguredAirbyteStream,
+    DestinationSyncMode,
+    SyncMode,
+    Type,
+)
+from airbyte_cdk.test import entrypoint_wrapper
+from airbyte_cdk.test.declarative.models import (
+    ConnectorTestScenario,
+)
+from airbyte_cdk.test.declarative.test_suites.connector_base import (
+    ConnectorTestSuiteBase,
+)
+from airbyte_cdk.test.declarative.utils.job_runner import run_test_job
+
+
+class SourceTestSuiteBase(ConnectorTestSuiteBase):
+    """Base class for source test suites.
+
+    This class provides a base set of functionality for testing source connectors, and it
+    inherits all generic connector tests from the `ConnectorTestSuiteBase` class.
+    """
+
+    def test_check(
+        self,
+        instance: ConnectorTestScenario,
+    ) -> None:
+        """Run `connection` acceptance tests."""
+        result: entrypoint_wrapper.EntrypointOutput = run_test_job(
+            self.create_connector(instance),
+            "check",
+            test_instance=instance,
+        )
+        conn_status_messages: list[AirbyteMessage] = [
+            msg for msg in result._messages if msg.type == Type.CONNECTION_STATUS
+        ]  # noqa: SLF001  # Non-public API
+        num_status_messages = len(conn_status_messages)
+        assert num_status_messages == 1, (
+            f"Expected exactly one CONNECTION_STATUS message. Got {num_status_messages}: \n"
+            + "\n".join([str(m) for m in result._messages])
+        )
+
+    def test_basic_read(
+        self,
+        instance: ConnectorTestScenario,
+    ) -> None:
+        """Run acceptance tests."""
+        discover_result = run_test_job(
+            self.create_connector(instance),
+            "discover",
+            test_instance=instance,
+        )
+        if instance.expect_exception:
+            assert discover_result.errors, "Expected exception but got none."
+            return
+
+        configured_catalog = ConfiguredAirbyteCatalog(
+            streams=[
+                ConfiguredAirbyteStream(
+                    stream=stream,
+                    sync_mode=SyncMode.full_refresh,
+                    destination_sync_mode=DestinationSyncMode.append_dedup,
+                )
+                for stream in discover_result.catalog.catalog.streams  # type: ignore [reportOptionalMemberAccess, union-attr]
+            ]
+        )
+        result = run_test_job(
+            self.create_connector(instance),
+            "read",
+            test_instance=instance,
+            catalog=configured_catalog,
+        )
+
+        if not result.records:
+            raise AssertionError("Expected records but got none.")  # noqa: TRY003
+
+    def test_fail_with_bad_catalog(
+        self,
+        instance: ConnectorTestScenario,
+    ) -> None:
+        """Test that a bad catalog fails."""
+        invalid_configured_catalog = ConfiguredAirbyteCatalog(
+            streams=[
+                # Create ConfiguredAirbyteStream which is deliberately invalid
+                # with regard to the Airbyte Protocol.
+                # This should cause the connector to fail.
+                ConfiguredAirbyteStream(
+                    stream=AirbyteStream(
+                        name="__AIRBYTE__stream_that_does_not_exist",
+                        json_schema={
+                            "type": "object",
+                            "properties": {"f1": {"type": "string"}},
+                        },
+                        supported_sync_modes=[SyncMode.full_refresh],
+                    ),
+                    sync_mode="INVALID",  # type: ignore [reportArgumentType]
+                    destination_sync_mode="INVALID",  # type: ignore [reportArgumentType]
+                )
+            ]
+        )
+        # Set the expected status to "failed" so the scenario expects the connector to fail.
+        instance.status = "failed"
+        result = self.run_test_scenario(
+            "read",
+            test_scenario=instance,
+            catalog=asdict(invalid_configured_catalog),
+        )
+        assert result.errors, "Expected errors but got none."
+        assert result.trace_messages, "Expected trace messages but got none."
+
+    def test_discover(
+        self,
+        instance: ConnectorTestScenario,
+    ) -> None:
+        """Run acceptance tests."""
+        run_test_job(
+            self.create_connector(instance),
+            "check",
+            test_instance=instance,
+        )
diff --git a/unit_tests/source_declarative_manifest/resources/__init__.py b/airbyte_cdk/test/declarative/utils/__init__.py
similarity index 100%
rename from unit_tests/source_declarative_manifest/resources/__init__.py
rename to airbyte_cdk/test/declarative/utils/__init__.py
diff --git a/airbyte_cdk/test/declarative/utils/job_runner.py b/airbyte_cdk/test/declarative/utils/job_runner.py
new file mode 100644
index 000000000..2036401be
--- /dev/null
+++ b/airbyte_cdk/test/declarative/utils/job_runner.py
@@ -0,0 +1,150 @@
+import tempfile
+import uuid
+from dataclasses import asdict
+from pathlib import Path
+from typing import Any, Callable, Literal
+
+import orjson
+from typing_extensions import Protocol, runtime_checkable
+
+from airbyte_cdk.models import (
+    ConfiguredAirbyteCatalog,
+    Status,
+)
+from airbyte_cdk.test import entrypoint_wrapper
+from airbyte_cdk.test.declarative.models import (
+    ConnectorTestScenario,
+)
+
+
+@runtime_checkable
+class IConnector(Protocol):
+    """A connector that can be run in a test scenario."""
+
+    def launch(self, args: list[str] | None) -> None:
+        """Launch the connector with the given arguments."""
+        ...
+
+
+def run_test_job(
+    connector: IConnector | type[IConnector] | Callable[[], IConnector],
+    verb: Literal["read", "check", "discover"],
+    test_instance: ConnectorTestScenario,
+    *,
+    catalog: ConfiguredAirbyteCatalog | dict[str, Any] | None = None,
+) -> entrypoint_wrapper.EntrypointOutput:
+    """Run a test job from provided CLI args and return the result."""
+    if not connector:
+        raise ValueError("Connector is required")
+
+    if catalog and isinstance(catalog, ConfiguredAirbyteCatalog):
+        # Convert the catalog to a dict if it was passed as a ConfiguredAirbyteCatalog object.
+        catalog = asdict(catalog)
+
+    connector_obj: IConnector
+    if isinstance(connector, type) or callable(connector):
+        # If the connector is a class or a factory lambda, instantiate it.
+        connector_obj = connector()
+    elif (
+        isinstance(
+            connector,
+            IConnector,
+        )
+        or True
+    ):  # TODO: Get a valid protocol check here
+        connector_obj = connector
+    else:
+        raise ValueError(
+            f"Invalid connector input: {type(connector)}",
+        )
+
+    args: list[str] = [verb]
+    if test_instance.config_path:
+        args += ["--config", str(test_instance.config_path)]
+    elif test_instance.config_dict:
+        config_path = (
+            Path(tempfile.gettempdir()) / "airbyte-test" / f"temp_config_{uuid.uuid4().hex}.json"
+        )
+        config_path.parent.mkdir(parents=True, exist_ok=True)
+        config_path.write_text(orjson.dumps(test_instance.config_dict).decode())
+        args += ["--config", str(config_path)]
+
+    catalog_path: Path | None = None
+    if verb not in ["discover", "check"]:
+        # We need a catalog for read.
+        if catalog:
+            # Write the catalog to a temp json file and pass the path to the file as an argument.
+            catalog_path = (
+                Path(tempfile.gettempdir())
+                / "airbyte-test"
+                / f"temp_catalog_{uuid.uuid4().hex}.json"
+            )
+            catalog_path.parent.mkdir(parents=True, exist_ok=True)
+            catalog_path.write_text(orjson.dumps(catalog).decode())
+        elif test_instance.configured_catalog_path:
+            catalog_path = Path(test_instance.configured_catalog_path)
+
+        if catalog_path:
+            args += ["--catalog", str(catalog_path)]
+
+    # This is a bit of a hack because the source needs the catalog early.
+    # Because it *also* can fail, we have to redundantly wrap it in a try/except block.
+
+    result: entrypoint_wrapper.EntrypointOutput = entrypoint_wrapper._run_command(  # noqa: SLF001  # Non-public API
+        source=connector_obj,  # type: ignore [arg-type]
+        args=args,
+        expecting_exception=test_instance.expect_exception,
+    )
+    if result.errors and not test_instance.expect_exception:
+        raise AssertionError(
+            "\n\n".join(
+                [str(err.trace.error).replace("\\n", "\n") for err in result.errors if err.trace],
+            )
+        )
+
+    if verb == "check":
+        # Check is expected to fail gracefully without an exception.
+        # Instead, we assert that we have a CONNECTION_STATUS message with
+        # a failure status.
+        assert not result.errors, "Expected no errors from check. Got:\n" + "\n".join(
+            [str(error) for error in result.errors]
+        )
+        assert len(result.connection_status_messages) == 1, (
+            "Expected exactly one CONNECTION_STATUS message. Got "
+            f"{len(result.connection_status_messages)}:\n"
+            + "\n".join([str(msg) for msg in result.connection_status_messages])
+        )
+        if test_instance.expect_exception:
+            conn_status = result.connection_status_messages[0].connectionStatus
+            assert conn_status, (
+                "Expected CONNECTION_STATUS message to be present. Got: \n"
+                + "\n".join([str(msg) for msg in result.connection_status_messages])
+            )
+            assert conn_status.status == Status.FAILED, (
+                "Expected CONNECTION_STATUS message to be FAILED. Got: \n"
+                + "\n".join([str(msg) for msg in result.connection_status_messages])
+            )
+
+        return result
+
+    # For all other verbs, we check that an exception is raised (or not).
+    if test_instance.expect_exception:
+        if not result.errors:
+            raise AssertionError("Expected exception but got none.")
+
+        return result
+    if result.errors:
+        raise AssertionError(
+            "\n\n".join(
+                [
+                    str(err.trace.error).replace(
+                        "\\n",
+                        "\n",
+                    )
+                    for err in result.errors
+                    if err.trace
+                ],
+            )
+        )
+
+    return result
diff --git a/airbyte_cdk/test/entrypoint_wrapper.py b/airbyte_cdk/test/entrypoint_wrapper.py
index f8e85bfb0..43c84204a 100644
--- a/airbyte_cdk/test/entrypoint_wrapper.py
+++ b/airbyte_cdk/test/entrypoint_wrapper.py
@@ -82,6 +82,10 @@ def records(self) -> List[AirbyteMessage]:
     def state_messages(self) -> List[AirbyteMessage]:
         return self._get_message_by_types([Type.STATE])
 
+    @property
+    def connection_status_messages(self) -> List[AirbyteMessage]:
+        return self._get_message_by_types([Type.CONNECTION_STATUS])
+
     @property
     def most_recent_state(self) -> Any:
         state_messages = self._get_message_by_types([Type.STATE])
diff --git a/airbyte_cdk/test/fixtures/__init__.py b/airbyte_cdk/test/fixtures/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/airbyte_cdk/test/fixtures/auto.py b/airbyte_cdk/test/fixtures/auto.py
new file mode 100644
index 000000000..8f7a34403
--- /dev/null
+++ b/airbyte_cdk/test/fixtures/auto.py
@@ -0,0 +1,14 @@
+"""Auto-use fixtures for pytest.
+
+WARNING: Importing this module will automatically apply these fixtures. If you want to selectively
+enable fixtures in a different context, you can import directly from the `fixtures.general` module.
+
+Usage:
+
+```python
+from airbyte_cdk.test.fixtures import auto
+# OR
+from airbyte_cdk.test.fixtures.auto import *
+```
+"""
diff --git a/airbyte_cdk/test/pytest_config/plugin.py b/airbyte_cdk/test/pytest_config/plugin.py
new file mode 100644
index 000000000..db9bb5176
--- /dev/null
+++ b/airbyte_cdk/test/pytest_config/plugin.py
@@ -0,0 +1,46 @@
+"""Global pytest configuration for the Airbyte CDK tests."""
+
+from pathlib import Path
+from typing import cast
+
+import pytest
+
+
+def pytest_collect_file(parent: pytest.Module | None, path: Path) -> pytest.Module | None:
+    """Collect test files based on their names."""
+    if path.name == "test_connector.py":
+        return cast(pytest.Module, pytest.Module.from_parent(parent, path=path))
+
+    return None
+
+
+def pytest_configure(config: pytest.Config) -> None:
+    config.addinivalue_line("markers", "connector: mark test as a connector test")
+
+
+def pytest_addoption(parser: pytest.Parser) -> None:
+    parser.addoption(
+        "--run-connector",
+        action="store_true",
+        default=False,
+        help="run connector tests",
+    )
+
+
+def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]) -> None:
+    if config.getoption("--run-connector"):
+        return
+    skip_connector = pytest.mark.skip(reason="need --run-connector option to run")
+    for item in items:
+        if "connector" in item.keywords:
+            item.add_marker(skip_connector)
+
+
+def pytest_runtest_setup(item: pytest.Item) -> None:
+    # This hook is called before each test function is executed
+    print(f"Setting up test: {item.name}")
+
+
+def pytest_runtest_teardown(item: pytest.Item, nextitem: pytest.Item | None) -> None:
+    # This hook is called after each test function is executed
+    print(f"Tearing down test: {item.name}")
diff --git a/poetry.lock b/poetry.lock
index 992f7f8f8..704398c41 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -307,6 +307,19 @@ charset-normalizer = ["charset-normalizer"]
 html5lib = ["html5lib"]
 lxml = ["lxml"]
 
+[[package]]
+name = "boltons"
+version = "25.0.0"
+description = "When they're not builtins, they're boltons."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
+files = [
+    {file = "boltons-25.0.0-py3-none-any.whl", hash = "sha256:dc9fb38bf28985715497d1b54d00b62ea866eca3938938ea9043e254a3a6ca62"},
+    {file = "boltons-25.0.0.tar.gz", hash = "sha256:e110fbdc30b7b9868cb604e3f71d4722dd8f4dcb4a5ddd06028ba8f1ab0b5ace"},
+]
+
 [[package]]
 name = "bracex"
 version = "2.5.post1"
@@ -4418,31 +4431,31 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]
 
 [[package]]
 name = "ruff"
-version = "0.7.4"
+version = "0.11.4"
 description = "An extremely fast Python linter and code formatter, written in Rust."
 optional = false
 python-versions = ">=3.7"
 groups = ["dev"]
 markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
 files = [
-    {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"},
-    {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"},
-    {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"},
-    {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"},
-    {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"},
-    {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"},
-    {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"},
+    {file = "ruff-0.11.4-py3-none-linux_armv6l.whl", hash = "sha256:d9f4a761ecbde448a2d3e12fb398647c7f0bf526dbc354a643ec505965824ed2"},
+    {file = "ruff-0.11.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8c1747d903447d45ca3d40c794d1a56458c51e5cc1bc77b7b64bd2cf0b1626cc"},
+    {file = "ruff-0.11.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:51a6494209cacca79e121e9b244dc30d3414dac8cc5afb93f852173a2ecfc906"},
+    {file = "ruff-0.11.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f171605f65f4fc49c87f41b456e882cd0c89e4ac9d58e149a2b07930e1d466f"},
+    {file = "ruff-0.11.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebf99ea9af918878e6ce42098981fc8c1db3850fef2f1ada69fb1dcdb0f8e79e"},
+    {file = "ruff-0.11.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edad2eac42279df12e176564a23fc6f4aaeeb09abba840627780b1bb11a9d223"},
+    {file = "ruff-0.11.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f103a848be9ff379fc19b5d656c1f911d0a0b4e3e0424f9532ececf319a4296e"},
+    {file = "ruff-0.11.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:193e6fac6eb60cc97b9f728e953c21cc38a20077ed64f912e9d62b97487f3f2d"},
+    {file = "ruff-0.11.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7af4e5f69b7c138be8dcffa5b4a061bf6ba6a3301f632a6bce25d45daff9bc99"},
+    {file = "ruff-0.11.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:126b1bf13154aa18ae2d6c3c5efe144ec14b97c60844cfa6eb960c2a05188222"},
+    {file = "ruff-0.11.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8806daaf9dfa881a0ed603f8a0e364e4f11b6ed461b56cae2b1c0cab0645304"},
+    {file = "ruff-0.11.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5d94bb1cc2fc94a769b0eb975344f1b1f3d294da1da9ddbb5a77665feb3a3019"},
+    {file = "ruff-0.11.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:995071203d0fe2183fc7a268766fd7603afb9996785f086b0d76edee8755c896"},
+    {file = "ruff-0.11.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7a37ca937e307ea18156e775a6ac6e02f34b99e8c23fe63c1996185a4efe0751"},
+    {file = "ruff-0.11.4-py3-none-win32.whl", hash = "sha256:0e9365a7dff9b93af933dab8aebce53b72d8f815e131796268709890b4a83270"},
+    {file = "ruff-0.11.4-py3-none-win_amd64.whl", hash = "sha256:5a9fa1c69c7815e39fcfb3646bbfd7f528fa8e2d4bebdcf4c2bd0fa037a255fb"},
+    {file = "ruff-0.11.4-py3-none-win_arm64.whl", hash = "sha256:d435db6b9b93d02934cf61ef332e66af82da6d8c69aefdea5994c89997c7a0fc"},
+    {file = "ruff-0.11.4.tar.gz", hash = "sha256:f45bd2fb1a56a5a85fae3b95add03fb185a0b30cf47f5edc92aa0355ca1d7407"},
 ]
 
 [[package]]
@@ -5507,4 +5520,4 @@ vector-db-based = ["cohere", "langchain", "openai", "tiktoken"]
 [metadata]
 lock-version = "2.1"
 python-versions = ">=3.10,<3.13"
-content-hash = "c8731f26643e07136e524d5e0d6e0f5c2229cf63d43bf5644de9f1cf8e565197"
+content-hash = "1a1aad48ffac046019656e2cb42d03569862cac3c81d91b5cca583bc553236a5"
diff --git a/pyproject.toml b/pyproject.toml
index 6a05b939b..273b53f7b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -32,6 +32,7 @@ enable = true
 python = ">=3.10,<3.13"
 airbyte-protocol-models-dataclasses = "^0.14"
 backoff = "*"
+boltons = "^25.0.0"
 cachetools = "*"
 dpath = "^2.1.6"
 dunamai = "^1.22.0"
@@ -88,7 +89,7 @@ whenever = "^0.6.16"
 freezegun = "*"
 mypy = "*"
 asyncio = "3.4.3"
-ruff = "^0.7.2"
+ruff = "^0.11.4"
 pdoc = "^15.0.0"
 poethepoet = "^0.24.2"
 pyproject-flake8 = "^6.1.0"
@@ -152,6 +153,9 @@ check-lockfile = {cmd = "poetry check", help = "Check the poetry lock file."}
 lint-fix = { cmd = "poetry run ruff check --fix .", help = "Auto-fix any lint issues that Ruff can automatically resolve (excluding 'unsafe' fixes)." }
 lint-fix-unsafe = { cmd = "poetry run ruff check --fix --unsafe-fixes .", help = "Lint-fix modified files, including 'unsafe' fixes. It is recommended to first commit any pending changes and then always manually review any unsafe changes applied." }
 
+# ruff fix everything
+ruff-fix = { sequence = ["lint-fix", "_format-fix-ruff"], help = "Lint-fix and format-fix all code." }
+
 # Combined Check and Fix tasks
 
 check-all = {sequence = ["lint", "format-check", "type-check", "check-lockfile"], help = "Lint, format, and type-check modified files.", ignore_fail = "return_non_zero"}
diff --git a/unit_tests/connector_builder/test_connector_builder_handler.py b/unit_tests/connector_builder/test_connector_builder_handler.py
index 2e786af8c..6e5368788 100644
--- a/unit_tests/connector_builder/test_connector_builder_handler.py
+++ b/unit_tests/connector_builder/test_connector_builder_handler.py
@@ -124,7 +124,7 @@
                 "values": ["0", "1", "2", "3", "4", "5", "6", "7"],
                 "cursor_field": "item_id",
             },
-            "" "requester": {
+            "requester": {
                 "path": "/v3/marketing/lists",
                 "authenticator": {
                     "type": "BearerAuthenticator",
@@ -175,7 +175,7 @@
                 "values": ["0", "1", "2", "3", "4", "5", "6", "7"],
                 "cursor_field": "item_id",
             },
-            "" "requester": {
+            "requester": {
                 "path": "/v3/marketing/lists",
                 "authenticator": {
                     "type": "BearerAuthenticator",
@@ -348,7 +348,7 @@
                 "values": ["0", "1", "2", "3", "4", "5", "6", "7"],
                 "cursor_field": "item_id",
             },
-            "" "requester": {
+            "requester": {
                 "path": "/v3/marketing/lists",
                 "authenticator": {"type": "OAuthAuthenticator", "api_token": "{{ config.apikey }}"},
                 "request_parameters": {"a_param": "10"},
@@ -1221,9 +1221,9 @@ def test_handle_read_external_requests(deployment_mode, url_base, expected_error
             source, config, catalog, _A_PER_PARTITION_STATE, limits
         ).record.data
         if expected_error:
-            assert (
-                len(output_data["logs"]) > 0
-            ), "Expected at least one log message with the expected error"
+            assert len(output_data["logs"]) > 0, (
+                "Expected at least one log message with the expected error"
+            )
             error_message = output_data["logs"][0]
             assert error_message["level"] == "ERROR"
             assert expected_error in error_message["stacktrace"]
@@ -1317,9 +1317,9 @@ def test_handle_read_external_oauth_request(deployment_mode, token_url, expected
             source, config, catalog, _A_PER_PARTITION_STATE, limits
         ).record.data
         if expected_error:
-            assert (
-                len(output_data["logs"]) > 0
-            ), "Expected at least one log message with the expected error"
+            assert len(output_data["logs"]) > 0, (
+                "Expected at least one log message with the expected error"
+            )
             error_message = output_data["logs"][0]
             assert error_message["level"] == "ERROR"
             assert expected_error in error_message["stacktrace"]
diff --git a/unit_tests/destinations/test_destination.py b/unit_tests/destinations/test_destination.py
index 14f52be15..1f8f6573f 100644
--- a/unit_tests/destinations/test_destination.py
+++ b/unit_tests/destinations/test_destination.py
@@ -58,9 +58,9 @@ def test_successful_parse(
         self, arg_list: List[str], expected_output: Mapping[str, Any], destination: Destination
     ):
         parsed_args = vars(destination.parse_args(arg_list))
-        assert (
-            parsed_args == expected_output
-        ), f"Expected parsing {arg_list} to return parsed args {expected_output} but instead found {parsed_args}"
+        assert parsed_args == expected_output, (
+            f"Expected parsing {arg_list} to return parsed args {expected_output} but instead found {parsed_args}"
+        )
 
     @pytest.mark.parametrize(
         ("arg_list"),
diff --git a/unit_tests/resources/__init__.py b/unit_tests/resources/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/unit_tests/source_declarative_manifest/resources/invalid_local_manifest.yaml b/unit_tests/resources/invalid_local_manifest.yaml
similarity index 100%
rename from unit_tests/source_declarative_manifest/resources/invalid_local_manifest.yaml
rename to unit_tests/resources/invalid_local_manifest.yaml
diff --git a/unit_tests/source_declarative_manifest/resources/invalid_local_pokeapi_config.json b/unit_tests/resources/invalid_local_pokeapi_config.json
similarity index 100%
rename from unit_tests/source_declarative_manifest/resources/invalid_local_pokeapi_config.json
rename to unit_tests/resources/invalid_local_pokeapi_config.json
diff --git a/unit_tests/source_declarative_manifest/resources/invalid_remote_config.json b/unit_tests/resources/invalid_remote_config.json
similarity index 100%
rename from unit_tests/source_declarative_manifest/resources/invalid_remote_config.json
rename to unit_tests/resources/invalid_remote_config.json
diff --git a/unit_tests/source_declarative_manifest/resources/source_pokeapi_w_components_py/README.md b/unit_tests/resources/source_pokeapi_w_components_py/README.md
similarity index 100%
rename from unit_tests/source_declarative_manifest/resources/source_pokeapi_w_components_py/README.md
rename to unit_tests/resources/source_pokeapi_w_components_py/README.md
diff --git a/unit_tests/resources/source_pokeapi_w_components_py/__init__.py b/unit_tests/resources/source_pokeapi_w_components_py/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/unit_tests/resources/source_pokeapi_w_components_py/acceptance-test-config.yml b/unit_tests/resources/source_pokeapi_w_components_py/acceptance-test-config.yml
new file mode 100644
index 000000000..e707a9099
--- /dev/null
+++ b/unit_tests/resources/source_pokeapi_w_components_py/acceptance-test-config.yml
@@ -0,0 +1,29 @@
+# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference)
+# for more information about how to configure these tests
+# connector_image: airbyte/source-pokeapi:dev
+acceptance_tests:
+  spec:
+    tests:
+      - spec_path: "manifest.yaml"
+        backward_compatibility_tests_config:
+          disable_for_version: "0.1.5"
+  connection:
+    tests:
+      - config_path: "valid_config.yaml"
+        status: "succeed"
+  discovery:
+    tests:
+      - config_path: "valid_config.yaml"
+        backward_compatibility_tests_config:
+          disable_for_version: "0.1.5"
+  basic_read:
+    tests:
+      - config_path: "valid_config.yaml"
+        configured_catalog_path: "integration_tests/configured_catalog.json"
+        empty_streams: []
+  incremental:
+    bypass_reason: "This connector does not implement incremental sync"
+  full_refresh:
+    tests:
+      - config_path: "valid_config.yaml"
+        configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/unit_tests/source_declarative_manifest/resources/source_pokeapi_w_components_py/components.py b/unit_tests/resources/source_pokeapi_w_components_py/components.py
similarity index 54%
rename from unit_tests/source_declarative_manifest/resources/source_pokeapi_w_components_py/components.py
rename to unit_tests/resources/source_pokeapi_w_components_py/components.py
index 5e7e16f71..7c6c0330a 100644
--- a/unit_tests/source_declarative_manifest/resources/source_pokeapi_w_components_py/components.py
+++ b/unit_tests/resources/source_pokeapi_w_components_py/components.py
@@ -1,5 +1,6 @@
 """A sample implementation of custom components that does nothing but will cause syncs to fail if missing."""
 
+from collections.abc import Iterable, MutableMapping
 from typing import Any, Mapping
 
 import requests
@@ -18,3 +19,14 @@ class MyCustomExtractor(DpathExtractor):
     """
 
     pass
+
+
+class IntentionalException(Exception):
+    """Raised intentionally to simulate a failure in tests."""
+
+
+class MyCustomFailingExtractor(DpathExtractor):
+    """Dummy class, intentionally raises an exception when extract_records is called."""
+
+    def extract_records(
+        self,
+        response: requests.Response,
+    ) -> Iterable[MutableMapping[Any, Any]]:
+        """Raise an exception when called."""
+        raise IntentionalException("This is an intentional failure for testing purposes.")
diff --git a/unit_tests/source_declarative_manifest/resources/source_pokeapi_w_components_py/components_failing.py b/unit_tests/resources/source_pokeapi_w_components_py/components_failing.py
similarity index 68%
rename from unit_tests/source_declarative_manifest/resources/source_pokeapi_w_components_py/components_failing.py
rename to unit_tests/resources/source_pokeapi_w_components_py/components_failing.py
index 5c05881e7..95a7c0662 100644
--- a/unit_tests/source_declarative_manifest/resources/source_pokeapi_w_components_py/components_failing.py
+++ b/unit_tests/resources/source_pokeapi_w_components_py/components_failing.py
@@ -1,11 +1,7 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
 """A sample implementation of custom components that does nothing but will cause syncs to fail if missing."""
 
 from collections.abc import Iterable, MutableMapping
-from dataclasses import InitVar, dataclass
-from typing import Any, Mapping, Optional, Union
+from typing import Any
 
 import requests
 
@@ -17,8 +13,11 @@ class IntentionalException(Exception):
 
 
 class MyCustomExtractor(DpathExtractor):
+    """Dummy class, intentionally raises an exception when extract_records is called."""
+
     def extract_records(
         self,
         response: requests.Response,
     ) -> Iterable[MutableMapping[Any, Any]]:
-        raise IntentionalException
+        """Raise an exception when called."""
+        raise IntentionalException("This is an intentional failure for testing purposes.")
diff --git a/unit_tests/resources/source_pokeapi_w_components_py/integration_tests/__init__.py b/unit_tests/resources/source_pokeapi_w_components_py/integration_tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/unit_tests/resources/source_pokeapi_w_components_py/integration_tests/test_mini_cat.py b/unit_tests/resources/source_pokeapi_w_components_py/integration_tests/test_mini_cat.py
new file mode 100644
index 000000000..010a37b14
--- /dev/null
+++ b/unit_tests/resources/source_pokeapi_w_components_py/integration_tests/test_mini_cat.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+from pathlib import Path
+
+from airbyte_cdk.test.declarative.test_suites import (
+    DeclarativeSourceTestSuite,
+    generate_tests,
+)
+
+
+def pytest_generate_tests(metafunc) -> None:
+    generate_tests(metafunc)
+
+
+class TestSuiteSourcePokeAPI(DeclarativeSourceTestSuite):
+    """Test suite for the source_pokeapi_w_components source.
+
+    This class inherits from DeclarativeSourceTestSuite and implements all of the tests in the suite.
+
+    As long as the class name starts with "Test", pytest will automatically discover and run the
+    tests in this class.
+    """
diff --git a/unit_tests/source_declarative_manifest/resources/source_pokeapi_w_components_py/manifest.yaml b/unit_tests/resources/source_pokeapi_w_components_py/manifest.yaml
similarity index 100%
rename from unit_tests/source_declarative_manifest/resources/source_pokeapi_w_components_py/manifest.yaml
rename to unit_tests/resources/source_pokeapi_w_components_py/manifest.yaml
diff --git a/unit_tests/resources/source_pokeapi_w_components_py/valid_config.yaml b/unit_tests/resources/source_pokeapi_w_components_py/valid_config.yaml
new file mode 100644
index 000000000..209b0a787
--- /dev/null
+++ b/unit_tests/resources/source_pokeapi_w_components_py/valid_config.yaml
@@ -0,0 +1 @@
+{ "start_date": "2024-01-01", "pokemon_name": "pikachu" }
diff --git a/unit_tests/source_declarative_manifest/resources/valid_local_manifest.yaml b/unit_tests/resources/valid_local_manifest.yaml
similarity index 100%
rename from unit_tests/source_declarative_manifest/resources/valid_local_manifest.yaml
rename to unit_tests/resources/valid_local_manifest.yaml
diff --git a/unit_tests/source_declarative_manifest/resources/valid_local_pokeapi_config.json b/unit_tests/resources/valid_local_pokeapi_config.json
similarity index 100%
rename from unit_tests/source_declarative_manifest/resources/valid_local_pokeapi_config.json
rename to unit_tests/resources/valid_local_pokeapi_config.json
diff --git a/unit_tests/source_declarative_manifest/resources/valid_remote_config.json b/unit_tests/resources/valid_remote_config.json
similarity index 100%
rename from unit_tests/source_declarative_manifest/resources/valid_remote_config.json
rename to unit_tests/resources/valid_remote_config.json
diff --git a/unit_tests/source_declarative_manifest/conftest.py b/unit_tests/source_declarative_manifest/conftest.py
index 3d61e65e8..6bb6784dd 100644
--- a/unit_tests/source_declarative_manifest/conftest.py
+++ b/unit_tests/source_declarative_manifest/conftest.py
@@ -3,33 +3,34 @@
 #
 
 import os
+from pathlib import Path
 
 import pytest
 import yaml
 
 
-def get_fixture_path(file_name):
-    return os.path.join(os.path.dirname(__file__), file_name)
+def get_resource_path(file_name: str) -> Path:
+    return Path(__file__).parent.parent / "resources" / file_name
 
 
 @pytest.fixture
 def valid_remote_config():
-    return get_fixture_path("resources/valid_remote_config.json")
+    return get_resource_path("valid_remote_config.json")
 
 
 @pytest.fixture
 def invalid_remote_config():
-    return get_fixture_path("resources/invalid_remote_config.json")
+    return get_resource_path("invalid_remote_config.json")
 
 
 @pytest.fixture
 def valid_local_manifest():
-    return get_fixture_path("resources/valid_local_manifest.yaml")
+    return get_resource_path("valid_local_manifest.yaml")
 
 
 @pytest.fixture
 def invalid_local_manifest():
-    return get_fixture_path("resources/invalid_local_manifest.yaml")
+    return get_resource_path("invalid_local_manifest.yaml")
 
 
 @pytest.fixture
@@ -46,9 +47,9 @@ def invalid_local_manifest_yaml(invalid_local_manifest):
 
 @pytest.fixture
 def valid_local_config_file():
-    return get_fixture_path("resources/valid_local_pokeapi_config.json")
+    return get_resource_path("valid_local_pokeapi_config.json")
 
 
 @pytest.fixture
 def invalid_local_config_file():
-    return get_fixture_path("resources/invalid_local_pokeapi_config.json")
+    return get_resource_path("invalid_local_pokeapi_config.json")
diff --git a/unit_tests/source_declarative_manifest/resources/source_pokeapi_w_components_py/valid_config.yaml b/unit_tests/source_declarative_manifest/resources/source_pokeapi_w_components_py/valid_config.yaml
deleted file mode 100644
index 78af092bb..000000000
--- a/unit_tests/source_declarative_manifest/resources/source_pokeapi_w_components_py/valid_config.yaml
+++ /dev/null
@@ -1 +0,0 @@
-{ "start_date": "2024-01-01", "pokemon": "pikachu" }
diff --git a/unit_tests/source_declarative_manifest/test_source_declarative_w_custom_components.py b/unit_tests/source_declarative_manifest/test_source_declarative_w_custom_components.py
index 40bb6d40b..121a2064a 100644
--- a/unit_tests/source_declarative_manifest/test_source_declarative_w_custom_components.py
+++ b/unit_tests/source_declarative_manifest/test_source_declarative_w_custom_components.py
@@ -5,7 +5,6 @@
 import datetime
 import json
 import logging
-import os
 import sys
 import types
 from collections.abc import Callable, Mapping
@@ -33,6 +32,7 @@
     custom_code_execution_permitted,
     register_components_module_from_string,
 )
+from airbyte_cdk.test.declarative.test_suites.connector_base import MANIFEST_YAML
 
 SAMPLE_COMPONENTS_PY_TEXT = """
 def sample_function() -> str:
@@ -44,8 +44,8 @@ def sample_method(self) -> str:
 """
 
 
-def get_fixture_path(file_name) -> str:
-    return os.path.join(os.path.dirname(__file__), file_name)
+def get_resource_path(file_name: str) -> Path:
+    return Path(__file__).parent.parent / "resources" / file_name
 
 
 def test_components_module_from_string() -> None:
@@ -90,19 +90,19 @@ def get_py_components_config_dict(
     *,
     failing_components: bool = False,
 ) -> dict[str, Any]:
-    connector_dir = Path(get_fixture_path("resources/source_pokeapi_w_components_py"))
-    manifest_yml_path: Path = connector_dir / "manifest.yaml"
+    connector_dir = get_resource_path("source_pokeapi_w_components_py")
+    manifest_yaml_path: Path = connector_dir / MANIFEST_YAML
     custom_py_code_path: Path = connector_dir / (
         "components.py" if not failing_components else "components_failing.py"
     )
     config_yaml_path: Path = connector_dir / "valid_config.yaml"
-    secrets_yaml_path: Path = connector_dir / "secrets.yaml"
+    # secrets_yaml_path: Path = connector_dir / "secrets.yaml"
 
-    manifest_dict = yaml.safe_load(manifest_yml_path.read_text())
+    manifest_dict = yaml.safe_load(manifest_yaml_path.read_text())
     assert manifest_dict, "Failed to load the manifest file."
-    assert isinstance(
-        manifest_dict, Mapping
-    ), f"Manifest file is type {type(manifest_dict).__name__}, not a mapping: {manifest_dict}"
+    assert isinstance(manifest_dict, Mapping), (
+        f"Manifest file is type {type(manifest_dict).__name__}, not a mapping: {manifest_dict}"
+    )
 
     custom_py_code = custom_py_code_path.read_text()
     combined_config_dict = {
diff --git a/unit_tests/sources/declarative/auth/test_session_token_auth.py b/unit_tests/sources/declarative/auth/test_session_token_auth.py
index 02de5b5b4..c4b15eb82 100644
--- a/unit_tests/sources/declarative/auth/test_session_token_auth.py
+++ b/unit_tests/sources/declarative/auth/test_session_token_auth.py
@@ -189,7 +189,7 @@ def test_get_new_session_token(requests_mock):
     )
 
     session_token = get_new_session_token(
-        f'{config["instance_api_url"]}session',
+        f"{config['instance_api_url']}session",
         config["username"],
         config["password"],
         config["session_token_response_key"],
diff --git a/unit_tests/sources/declarative/incremental/test_concurrent_perpartitioncursor.py b/unit_tests/sources/declarative/incremental/test_concurrent_perpartitioncursor.py
index 042a430aa..e23d03a4a 100644
--- a/unit_tests/sources/declarative/incremental/test_concurrent_perpartitioncursor.py
+++ b/unit_tests/sources/declarative/incremental/test_concurrent_perpartitioncursor.py
@@ -363,9 +363,9 @@ def run_mocked_test(
             request_count = len(
                 [req for req in m.request_history if unquote(req.url) == unquote(url)]
             )
-            assert (
-                request_count == 1
-            ), f"URL {url} was called {request_count} times, expected exactly once."
+            assert request_count == 1, (
+                f"URL {url} was called {request_count} times, expected exactly once."
+            )
 
 
 def _run_read(
@@ -855,10 +855,11 @@ def run_incremental_parent_state_test(
             expected_records_set = list(
                 {orjson.dumps(record): record for record in expected_records}.values()
             )
-            assert (
-                sorted(cumulative_records_state_deduped, key=lambda x: x["id"])
-                == sorted(expected_records_set, key=lambda x: x["id"])
-            ), f"Records mismatch with intermediate state {state}. Expected {expected_records}, got {cumulative_records_state_deduped}"
+            assert sorted(cumulative_records_state_deduped, key=lambda x: x["id"]) == sorted(
+                expected_records_set, key=lambda x: x["id"]
+            ), (
+                f"Records mismatch with intermediate state {state}. Expected {expected_records}, got {cumulative_records_state_deduped}"
+            )
 
             # Store the final state after each intermediate read
             final_state_intermediate = [
@@ -869,9 +870,9 @@ def run_incremental_parent_state_test(
 
         # Assert that the final state matches the expected state for all runs
         for i, final_state in enumerate(final_states):
-            assert (
-                final_state in expected_states
-            ), f"Final state mismatch at run {i + 1}. Expected {expected_states}, got {final_state}"
+            assert final_state in expected_states, (
+                f"Final state mismatch at run {i + 1}. Expected {expected_states}, got {final_state}"
+            )
 
 
 @pytest.mark.parametrize(
@@ -1300,8 +1301,7 @@ def test_incremental_parent_state(
                             {"id": 11, "post_id": 1, "updated_at": COMMENT_11_UPDATED_AT},
                         ],
                         "next_page": (
-                            "https://api.example.com/community/posts/1/comments"
-                            "?per_page=100&page=2"
+                            "https://api.example.com/community/posts/1/comments?per_page=100&page=2"
                         ),
                     },
                 ),
@@ -1346,8 +1346,7 @@ def test_incremental_parent_state(
                     {
                         "comments": [{"id": 20, "post_id": 2, "updated_at": COMMENT_20_UPDATED_AT}],
                         "next_page": (
-                            "https://api.example.com/community/posts/2/comments"
-                            "?per_page=100&page=2"
+                            "https://api.example.com/community/posts/2/comments?per_page=100&page=2"
                         ),
                     },
                 ),
diff --git a/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py b/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py
index f628eeb3b..856106bfe 100644
--- a/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py
+++ b/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py
@@ -3413,9 +3413,9 @@ def migrate(self, stream_state: Mapping[str, Any]) -> Mapping[str, Any]:
         stream_state_migrations=[DummyStateMigration()],
     )
     assert cursor.state["lookback_window"] != 10, "State migration wasn't called"
-    assert (
-        cursor.state["lookback_window"] == 20
-    ), "State migration was called, but actual state don't match expected"
+    assert cursor.state["lookback_window"] == 20, (
+        "State migration was called, but actual state don't match expected"
+    )
 
 
 def test_create_concurrent_cursor_uses_min_max_datetime_format_if_defined():
diff --git a/unit_tests/sources/declarative/partition_routers/test_grouping_partition_router.py b/unit_tests/sources/declarative/partition_routers/test_grouping_partition_router.py
index a75a48966..9bea606e4 100644
--- a/unit_tests/sources/declarative/partition_routers/test_grouping_partition_router.py
+++ b/unit_tests/sources/declarative/partition_routers/test_grouping_partition_router.py
@@ -269,9 +269,9 @@ def __next__(self):
         cursor_slice={},
         extra_fields={"name": ["Board 0", "Board 1"], "owner": ["User0", "User1"]},
     )
-    assert (
-        controlled_iter.yield_count == 2
-    ), "Only 2 slices should be yielded to form the first group"
+    assert controlled_iter.yield_count == 2, (
+        "Only 2 slices should be yielded to form the first group"
+    )
 
     # Get the second slice
     second_slice = next(slices_iter)
@@ -280,9 +280,9 @@ def __next__(self):
         cursor_slice={},
         extra_fields={"name": ["Board 2", "Board 3"], "owner": ["User2", "User3"]},
     )
-    assert (
-        controlled_iter.yield_count == 4
-    ), "Only 4 slices should be yielded up to the second group"
+    assert controlled_iter.yield_count == 4, (
+        "Only 4 slices should be yielded up to the second group"
+    )
 
     # Exhaust the iterator
     remaining_slices = list(slices_iter)
@@ -293,9 +293,9 @@ def __next__(self):
             extra_fields={"name": ["Board 4"], "owner": ["User4"]},
         )
     ]
-    assert (
-        controlled_iter.yield_count == 5
-    ), "All 5 slices should be yielded after exhausting the iterator"
+    assert controlled_iter.yield_count == 5, (
+        "All 5 slices should be yielded after exhausting the iterator"
+    )
 
 
 def test_set_initial_state_delegation(mock_config, mock_underlying_router):
diff --git a/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py b/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py
index 2cc6080e9..4fbbd7355 100644
--- a/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py
+++ b/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py
@@ -338,10 +338,11 @@ def run_incremental_parent_state_test(
             expected_records_set = list(
                 {orjson.dumps(record): record for record in expected_records}.values()
             )
-            assert (
-                sorted(cumulative_records_state_deduped, key=lambda x: orjson.dumps(x))
-                == sorted(expected_records_set, key=lambda x: orjson.dumps(x))
-            ), f"Records mismatch with intermediate state {state}. Expected {expected_records}, got {cumulative_records_state_deduped}"
+            assert sorted(
+                cumulative_records_state_deduped, key=lambda x: orjson.dumps(x)
+            ) == sorted(expected_records_set, key=lambda x: orjson.dumps(x)), (
+                f"Records mismatch with intermediate state {state}. Expected {expected_records}, got {cumulative_records_state_deduped}"
+            )
 
             # Store the final state after each intermediate read
             final_state_intermediate = [
@@ -353,9 +354,9 @@ def run_incremental_parent_state_test(
 
         # Assert that the final state matches the expected state for all runs
         for i, final_state in enumerate(final_states):
-            assert (
-                final_state in expected_states
-            ), f"Final state mismatch at run {i + 1}. Expected {expected_states}, got {final_state}"
+            assert final_state in expected_states, (
+                f"Final state mismatch at run {i + 1}. Expected {expected_states}, got {final_state}"
+            )
 
 
 @pytest.mark.parametrize(
diff --git a/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py b/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py
index 675e76a95..80c8f1e10 100644
--- a/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py
+++ b/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py
@@ -1200,13 +1200,13 @@ def test_substream_using_resumable_full_refresh_parent_stream_slices(use_increme
     # validate final state for closed substream slices
     final_state = substream_cursor_slicer.get_stream_state()
     if not use_incremental_dependency:
-        assert (
-            final_state["states"] == expected_substream_state["states"]
-        ), "State for substreams is not valid!"
+        assert final_state["states"] == expected_substream_state["states"], (
+            "State for substreams is not valid!"
+        )
     else:
-        assert (
-            final_state == expected_substream_state
-        ), "State for substreams with incremental dependency is not valid!"
+        assert final_state == expected_substream_state, (
+            "State for substreams with incremental dependency is not valid!"
+        )
 
 
 @pytest.mark.parametrize(
diff --git a/unit_tests/sources/declarative/test_concurrent_declarative_source.py b/unit_tests/sources/declarative/test_concurrent_declarative_source.py
index 4a043ac82..6af69eac5 100644
--- a/unit_tests/sources/declarative/test_concurrent_declarative_source.py
+++ b/unit_tests/sources/declarative/test_concurrent_declarative_source.py
@@ -1442,9 +1442,9 @@ def test_concurrent_declarative_source_runs_state_migrations_provided_in_manifes
         source_config=manifest, config=_CONFIG, catalog=_CATALOG, state=state
     )
     concurrent_streams, synchronous_streams = source._group_streams(_CONFIG)
-    assert (
-        concurrent_streams[0].cursor.state.get("state") != state_blob.__dict__
-    ), "State was not migrated."
+    assert concurrent_streams[0].cursor.state.get("state") != state_blob.__dict__, (
+        "State was not migrated."
+    )
     assert concurrent_streams[0].cursor.state.get("states") == [
         {"cursor": {"updated_at": "2024-08-21"}, "partition": {"type": "type_1"}},
         {"cursor": {"updated_at": "2024-08-21"}, "partition": {"type": "type_2"}},
diff --git a/unit_tests/sources/file_based/test_scenarios.py b/unit_tests/sources/file_based/test_scenarios.py
index 214dd0872..d70b7f4ef 100644
--- a/unit_tests/sources/file_based/test_scenarios.py
+++ b/unit_tests/sources/file_based/test_scenarios.py
@@ -122,9 +122,9 @@ def _verify_read_output(output: EntrypointOutput, scenario: TestScenario[Abstrac
 
     expected_states = list(filter(lambda e: "data" not in e, expected_records))
     states = list(filter(lambda r: r.state, records_and_state_messages))
-    assert (
-        len(states) > 0
-    ), "No state messages emitted. Successful syncs should emit at least one stream state."
+    assert len(states) > 0, (
+        "No state messages emitted. Successful syncs should emit at least one stream state."
+    )
     _verify_state_record_counts(sorted_records, states)
 
     if hasattr(scenario.source, "cursor_cls") and issubclass(
@@ -182,9 +182,9 @@ def _verify_analytics(
     expected_analytics: Optional[List[AirbyteAnalyticsTraceMessage]],
 ) -> None:
     if expected_analytics:
-        assert (
-            len(analytics) == len(expected_analytics)
-        ), f"Number of actual analytics messages ({len(analytics)}) did not match expected ({len(expected_analytics)})"
+        assert len(analytics) == len(expected_analytics), (
+            f"Number of actual analytics messages ({len(analytics)}) did not match expected ({len(expected_analytics)})"
+        )
         for actual, expected in zip(analytics, expected_analytics):
             actual_type, actual_value = actual.trace.analytics.type, actual.trace.analytics.value
             expected_type = expected.type
diff --git a/unit_tests/sources/streams/http/test_http.py b/unit_tests/sources/streams/http/test_http.py
index 9f6209866..f7ad9e47e 100644
--- a/unit_tests/sources/streams/http/test_http.py
+++ b/unit_tests/sources/streams/http/test_http.py
@@ -506,7 +506,7 @@ class CacheHttpStreamWithSlices(CacheHttpStream):
     paths = ["", "search"]
 
     def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
-        return f'{stream_slice["path"]}' if stream_slice else ""
+        return f"{stream_slice['path']}" if stream_slice else ""
 
     def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]:
         for path in self.paths:
diff --git a/unit_tests/sources/streams/test_call_rate.py b/unit_tests/sources/streams/test_call_rate.py
index 853e2997e..b99905870 100644
--- a/unit_tests/sources/streams/test_call_rate.py
+++ b/unit_tests/sources/streams/test_call_rate.py
@@ -268,9 +268,9 @@ def test_limit_rate(self):
 
         with pytest.raises(CallRateLimitHit) as excinfo2:
             policy.try_acquire("call", weight=1), "call over limit"
-        assert (
-            excinfo2.value.time_to_wait < excinfo1.value.time_to_wait
-        ), "time to wait must decrease over time"
+        assert excinfo2.value.time_to_wait < excinfo1.value.time_to_wait, (
+            "time to wait must decrease over time"
+        )
 
     def test_limit_rate_support_custom_weight(self):
         """try_acquire must take into account provided weight and throw CallRateLimitHit when hit the limit."""
@@ -279,9 +279,9 @@ def test_limit_rate_support_custom_weight(self):
         policy.try_acquire("call", weight=2), "1st call with weight of 2"
         with pytest.raises(CallRateLimitHit) as excinfo:
             policy.try_acquire("call", weight=9), "2nd call, over limit since 2 + 9 = 11 > 10"
-        assert excinfo.value.time_to_wait.total_seconds() == pytest.approx(
-            60, 0.1
-        ), "should wait 1 minute before next call"
+        assert excinfo.value.time_to_wait.total_seconds() == pytest.approx(60, 0.1), (
+            "should wait 1 minute before next call"
+        )
 
     def test_multiple_limit_rates(self):
         """try_acquire must take into all call rates and apply stricter."""
diff --git a/unit_tests/sources/streams/test_stream_read.py b/unit_tests/sources/streams/test_stream_read.py
index ebe258ef2..ac11b7499 100644
--- a/unit_tests/sources/streams/test_stream_read.py
+++ b/unit_tests/sources/streams/test_stream_read.py
@@ -750,9 +750,9 @@ def test_configured_json_schema_with_invalid_properties():
     assert old_user_insights not in configured_json_schema_properties
     assert old_feature_info not in configured_json_schema_properties
     for stream_schema_property in stream_schema["properties"]:
-        assert (
-            stream_schema_property in configured_json_schema_properties
-        ), f"Stream schema property: {stream_schema_property} missing in configured schema"
+        assert stream_schema_property in configured_json_schema_properties, (
+            f"Stream schema property: {stream_schema_property} missing in configured schema"
+        )
         assert (
             stream_schema["properties"][stream_schema_property]
             == configured_json_schema_properties[stream_schema_property]
diff --git a/unit_tests/sources/test_config.py b/unit_tests/sources/test_config.py
index 23177c683..94d58540e 100644
--- a/unit_tests/sources/test_config.py
+++ b/unit_tests/sources/test_config.py
@@ -46,7 +46,7 @@ class TestBaseConfig:
                             "name": {"title": "Name", "type": "string"},
                             "selected_strategy": {
                                 "const": "option1",
-                                "title": "Selected " "Strategy",
+                                "title": "Selected Strategy",
                                 "type": "string",
                                 "default": "option1",
                             },
@@ -59,7 +59,7 @@ class TestBaseConfig:
                         "properties": {
                             "selected_strategy": {
                                 "const": "option2",
-                                "title": "Selected " "Strategy",
+                                "title": "Selected Strategy",
                                 "type": "string",
                                 "default": "option2",
                             },
diff --git a/unit_tests/test_entrypoint.py b/unit_tests/test_entrypoint.py
index e906e8b39..52d742c07 100644
--- a/unit_tests/test_entrypoint.py
+++ b/unit_tests/test_entrypoint.py
@@ -765,9 +765,9 @@ def test_handle_record_counts(
         assert message_count == expected_records_by_stream[stream_descriptor]
 
     if actual_message.type == Type.STATE:
-        assert isinstance(
-            actual_message.state.sourceStats.recordCount, float
-        ), "recordCount value should be expressed as a float"
+        assert isinstance(actual_message.state.sourceStats.recordCount, float), (
+            "recordCount value should be expressed as a float"
+        )
 
 
 def test_given_serialization_error_using_orjson_then_fallback_on_json(
diff --git a/unit_tests/test_exception_handler.py b/unit_tests/test_exception_handler.py
index 0edcf1247..95d0e9c3a 100644
--- a/unit_tests/test_exception_handler.py
+++ b/unit_tests/test_exception_handler.py
@@ -88,6 +88,6 @@ def test_uncaught_exception_handler():
     out_trace_message = AirbyteMessageSerializer.load(json.loads(trace_output))
     assert out_trace_message.trace.emitted_at > 0
     out_trace_message.trace.emitted_at = 0.0  # set a specific emitted_at value for testing
-    assert (
-        out_trace_message == expected_trace_message
-    ), "Trace message should be emitted in expected form"
+    assert out_trace_message == expected_trace_message, (
+        "Trace message should be emitted in expected form"
+    )
diff --git a/unit_tests/test_secure_logger.py b/unit_tests/test_secure_logger.py
index 0237091fe..757a069c7 100644
--- a/unit_tests/test_secure_logger.py
+++ b/unit_tests/test_secure_logger.py
@@ -203,12 +203,12 @@ def read(
         list(entrypoint.run(parsed_args))
     except Exception:
         sys.excepthook(*sys.exc_info())
-        assert (
-            I_AM_A_SECRET_VALUE not in capsys.readouterr().out
-        ), "Should have filtered non-secret value from exception trace message"
-        assert (
-            I_AM_A_SECRET_VALUE not in caplog.text
-        ), "Should have filtered secret value from exception log message"
+        assert I_AM_A_SECRET_VALUE not in capsys.readouterr().out, (
+            "Should have filtered non-secret value from exception trace message"
+        )
+        assert I_AM_A_SECRET_VALUE not in caplog.text, (
+            "Should have filtered secret value from exception log message"
+        )
 
 
 def test_non_airbyte_secrets_are_not_masked_on_uncaught_exceptions(mocker, caplog, capsys):
@@ -257,9 +257,9 @@ def read(
         list(entrypoint.run(parsed_args))
     except Exception:
         sys.excepthook(*sys.exc_info())
-        assert (
-            NOT_A_SECRET_VALUE in capsys.readouterr().out
-        ), "Should not have filtered non-secret value from exception trace message"
-        assert (
-            NOT_A_SECRET_VALUE in caplog.text
-        ), "Should not have filtered non-secret value from exception log message"
+        assert NOT_A_SECRET_VALUE in capsys.readouterr().out, (
+            "Should not have filtered non-secret value from exception trace message"
+        )
+        assert NOT_A_SECRET_VALUE in caplog.text, (
+            "Should not have filtered non-secret value from exception log message"
+        )
diff --git a/unit_tests/utils/test_secret_utils.py b/unit_tests/utils/test_secret_utils.py
index 73c93e670..d6f4f4563 100644
--- a/unit_tests/utils/test_secret_utils.py
+++ b/unit_tests/utils/test_secret_utils.py
@@ -150,9 +150,9 @@ def test_get_secret_paths(spec, expected):
     ],
 )
 def test_get_secrets(spec, config, expected):
-    assert (
-        get_secrets(spec, config) == expected
-    ), f"Expected the spec {spec} and config {config} to produce {expected}"
+    assert get_secrets(spec, config) == expected, (
+        f"Expected the spec {spec} and config {config} to produce {expected}"
+    )
 
 
 def test_secret_filtering():
diff --git a/unit_tests/utils/test_traced_exception.py b/unit_tests/utils/test_traced_exception.py
index 0e5b58439..21a44c646 100644
--- a/unit_tests/utils/test_traced_exception.py
+++ b/unit_tests/utils/test_traced_exception.py
@@ -76,7 +76,7 @@ def test_existing_exception_as_airbyte_message(raised_exception):
     assert airbyte_message.trace.error.internal_message == "an error has occurred"
     assert airbyte_message.trace.error.stack_trace.startswith("Traceback (most recent call last):")
     assert airbyte_message.trace.error.stack_trace.endswith(
-        'raise RuntimeError("an error has occurred")\n' "RuntimeError: an error has occurred\n"
+        'raise RuntimeError("an error has occurred")\nRuntimeError: an error has occurred\n'
     )