diff --git a/README.md b/README.md index ff213acf..7607911f 100644 --- a/README.md +++ b/README.md @@ -19,6 +19,10 @@ pip install MergePythonClient A full reference for this library is available [here](https://github.com/merge-api/merge-python-client/blob/HEAD/./reference.md). +### Merge Assurance + +Merge Assurance is a feature that proactively monitors for expiring API credentials and automatically refreshes them. You can find more information about it in the [Remediation: Merge Assurance Agent](https://github.com/merge-api/merge-python-client/blob/HEAD/./reference.md#remediation-merge-assurance-agent) section of the reference guide. + ## Usage Instantiate and use the client with the following: diff --git a/e2e_test/mock_server.py b/e2e_test/mock_server.py new file mode 100644 index 00000000..d88738ee --- /dev/null +++ b/e2e_test/mock_server.py @@ -0,0 +1,99 @@ +# A stateful mock server that simulates the Merge API for E2E tests. + +from __future__ import annotations + +import logging +import sys +import typing +from datetime import datetime, timedelta + +from flask import Flask, jsonify, request + +# --- Server Configuration --- +HOST = "127.0.0.1" +PORT = 5002 + +# --- In-Memory "Database" --- +# This global state allows the server to remember changes, like a token refresh. +mock_db: typing.Dict[str, typing.Dict[str, typing.Any]] = {} + +def reset_mock_db() -> None: + """Resets the database to its initial state.""" + global mock_db + now = datetime.now() + mock_db = { + "token_ok_1": {"expires_at": (now + timedelta(days=45)).timestamp(), "refreshed": False}, + "token_expiring_soon": {"expires_at": (now + timedelta(days=15)).timestamp(), "refreshed": False}, + # This token will fail on its first refresh attempt, then succeed. + "token_retry_success": { + "expires_at": (now + timedelta(days=20)).timestamp(), + "refreshed": False, + "attempts": 0, + }, + "token_immediate_failure": {"expires_at": (now + timedelta(days=5)).timestamp(), "refreshed": False}, + } + +# --- Logging Setup --- +# Configure a logger for the mock server +log = logging.getLogger("MOCK_SERVER") +log.setLevel(logging.INFO) +handler = logging.StreamHandler(sys.stdout) +formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') +handler.setFormatter(formatter) +log.addHandler(handler) +# Silence the default Werkzeug logger to use our own. +logging.getLogger('werkzeug').setLevel(logging.ERROR) + + +# --- Flask App --- +app = Flask(__name__) + +@app.route("/api/v1/credentials", methods=["GET"]) +def get_credentials() -> typing.Any: + """Returns the current state of credentials.""" + log.info("Request received for GET /api/v1/credentials") + return jsonify(mock_db) + +@app.route("/api/v1/refresh-token", methods=["POST"]) +def refresh_token() -> typing.Any: + """Simulates the token refresh logic.""" + data = request.get_json() + token = data.get("account_token") + log.info(f"Request received for POST /api/v1/refresh-token for token: {token}") + + if not token or token not in mock_db: + return jsonify({"error": "Invalid Token"}), 404 + + # --- Simulation Logic --- + if token == "token_expiring_soon": + # On success, update the DB to show the token is no longer expiring. + mock_db[token]["expires_at"] = (datetime.now() + timedelta(days=60)).timestamp() + mock_db[token]["refreshed"] = True + log.info(f"Successfully refreshed '{token}'. It now expires in 60 days.") + return jsonify({"status": "refreshed"}), 200 + + if token == "token_retry_success": + mock_db[token]["attempts"] += 1 + # Fail on the first attempt to simulate a transient network error. + if mock_db[token]["attempts"] <= 1: + log.info(f"Simulating retryable failure for '{token}'. Attempt #{mock_db[token]['attempts']}.") + return jsonify({"error": "Service Temporarily Unavailable"}), 503 + else: + # Succeed on the second attempt. + mock_db[token]["expires_at"] = (datetime.now() + timedelta(days=60)).timestamp() + mock_db[token]["refreshed"] = True + log.info(f"Successfully refreshed '{token}' after {mock_db[token]['attempts']} attempts.") + return jsonify({"status": "refreshed"}), 200 + + if token == "token_immediate_failure": + return jsonify({"error": "Invalid Refresh Token"}), 401 + + # Default case for other tokens + return jsonify({"status": "no_action_needed"}), 200 + +# --- Main Execution --- +if __name__ == "__main__": + reset_mock_db() + log.info(f"Starting Mock Merge API Server on http://{HOST}:{PORT}") + # Use a production-ready server like Waitress for cleaner logs + app.run(host=HOST, port=PORT) \ No newline at end of file diff --git a/e2e_test/sample_app.py b/e2e_test/sample_app.py new file mode 100644 index 00000000..9620657c --- /dev/null +++ b/e2e_test/sample_app.py @@ -0,0 +1,129 @@ +# A self-contained script to demonstrate the Assurance Agent in an E2E test. + +from __future__ import annotations + +import logging +import os +import signal +import subprocess +import sys +import time +import typing + +from merge.client import Merge +from merge.remediation.agent import AssuranceAgent + +# --- App Configuration --- +MOCK_API_BASE_URL = "http://127.0.0.1:5002" +LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper() + +# --- Global State --- +agent_handle: typing.Optional[AssuranceAgent] = None + + +# A custom formatter to add color to the log output for readability. +class ColoredFormatter(logging.Formatter): + """A logging formatter that adds color to log messages.""" + + COLORS = { + "SUCCESS_CALLBACK": "\033[92m", # Green + "FAILURE_CALLBACK": "\033[91m", # Red + "ENDC": "\033[0m", + } + + def format(self, record: logging.LogRecord) -> str: + log_message = super().format(record) + # For our specific callbacks, find and replace the keyword. + # This is a simple approach; a more robust one might inspect record attributes. + for key, color in self.COLORS.items(): + if key in log_message: + log_message = log_message.replace(key, f"{color}{key}{self.COLORS['ENDC']}") + return log_message + + +# --- Logging Setup --- +def configure_logging() -> None: + """Sets up readable, colored logging for the application.""" + handler = logging.StreamHandler(sys.stdout) + # Use our new ColoredFormatter and a simpler log format + formatter = ColoredFormatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") + handler.setFormatter(formatter) + logging.basicConfig(level=LOG_LEVEL, handlers=[handler], force=True) + # Give httpx its own logger to reduce noise unless we're debugging + logging.getLogger("httpx").setLevel(logging.WARNING if LOG_LEVEL != "DEBUG" else logging.DEBUG) + + +# --- Merge Assurance Callbacks --- +def on_refresh_success(account_token: str) -> None: + """Callback for when a token is successfully refreshed.""" + # Log a plain string so the ColoredFormatter can process it. + logging.info(f"SUCCESS_CALLBACK: Token '{account_token}' was refreshed.") + + +def on_refresh_failure(account_token: str, error: Exception) -> None: + """Callback for when a token refresh fails.""" + # Log a plain string so the ColoredFormatter can process it. + logging.error(f"FAILURE_CALLBACK: Token '{account_token}' failed. Error: {error}") + + +# --- Main Execution --- +if __name__ == "__main__": + configure_logging() + server_process = None + + # Use a context manager for the server process to ensure it's always cleaned up. + try: + logging.info("Starting mock API server...") + server_command = [sys.executable, "e2e_test/mock_server.py"] + server_process = subprocess.Popen( + server_command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + preexec_fn=os.setsid + ) + time.sleep(2) # Give the server a moment to start up + + if server_process.poll() is not None: + raise RuntimeError("Mock server failed to start. Is the port already in use?") + + # Initialize the Merge client + client = Merge( + api_key="YOUR_API_KEY", + account_token="YOUR_ACCOUNT_TOKEN", + base_url=MOCK_API_BASE_URL + ) + + agent_handle = client.remediation.enable_assurance( + on_success=on_refresh_success, + on_failure=on_refresh_failure, + check_interval_seconds=6, # Short interval for demo + expiry_threshold_days=30, + ) + + logging.info("--- DEMO START: Agent running for 30 seconds. ---") + time.sleep(30) + logging.info("--- DEMO END ---") + + finally: + if agent_handle: + logging.info("Shutting down Assurance Agent...") + agent_handle.shutdown() + + if server_process: + logging.info("Stopping mock API server...") + try: + os.killpg(os.getpgid(server_process.pid), signal.SIGTERM) + server_process.wait(timeout=5) + logging.info("Mock API server stopped.") + except (ProcessLookupError, OSError) as e: + logging.warning(f"Could not kill mock server, it may have already exited. Error: {e}") + + stdout, stderr = server_process.communicate() + if stderr: + # Don't log expected 'Address already in use' as an error if we catch it. + if "Address already in use" not in stderr: + logging.error(f"Mock server stderr: {stderr.strip()}") + + + logging.info("Demonstration complete.") diff --git a/poetry.lock b/poetry.lock index c2ddd863..80e0fcd8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. [[package]] name = "annotated-types" @@ -6,6 +6,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -20,6 +21,7 @@ version = "4.5.2" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"}, {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"}, @@ -33,26 +35,56 @@ typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21.0b1) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\""] trio = ["trio (>=0.26.1)"] +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + [[package]] name = "certifi" version = "2025.8.3" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"}, {file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"}, ] +[[package]] +name = "click" +version = "8.1.8" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +markers = "sys_platform == \"win32\" or platform_system == \"Windows\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -64,6 +96,8 @@ version = "1.3.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, @@ -75,12 +109,37 @@ typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "flask" +version = "2.3.3" +description = "A simple framework for building complex web applications." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "flask-2.3.3-py3-none-any.whl", hash = "sha256:f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b"}, + {file = "flask-2.3.3.tar.gz", hash = "sha256:09c347a92aa7ff4a8e7f3206795f30d826654baf38b873d0744cd571ca609efc"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=2.3.7" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + [[package]] name = "h11" version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, @@ -92,6 +151,7 @@ version = "1.0.9" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, @@ -113,6 +173,7 @@ version = "0.28.1" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -125,7 +186,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -137,6 +198,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -145,23 +207,150 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +[[package]] +name = "importlib-metadata" +version = "8.5.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.10\"" +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + [[package]] name = "iniconfig" version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] +[[package]] +name = "itsdangerous" +version = "2.2.0" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + [[package]] name = "mypy" version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, @@ -215,6 +404,7 @@ version = "1.1.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, @@ -226,6 +416,7 @@ version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, @@ -237,6 +428,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -252,6 +444,7 @@ version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -264,7 +457,7 @@ typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" @@ -272,6 +465,7 @@ version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -384,6 +578,7 @@ version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -406,6 +601,7 @@ version = "0.23.8" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, @@ -424,6 +620,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["dev"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -438,6 +635,7 @@ version = "0.11.5" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.11.5-py3-none-linux_armv6l.whl", hash = "sha256:2561294e108eb648e50f210671cc56aee590fb6167b594144401532138c66c7b"}, {file = "ruff-0.11.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ac12884b9e005c12d0bd121f56ccf8033e1614f736f766c118ad60780882a077"}, @@ -465,6 +663,7 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["dev"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -476,6 +675,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -487,6 +687,8 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.11\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -528,6 +730,7 @@ version = "2.9.0.20241206" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, @@ -539,12 +742,52 @@ version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] +[[package]] +name = "werkzeug" +version = "3.0.6" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "werkzeug-3.0.6-py3-none-any.whl", hash = "sha256:1bc0c2310d2fbb07b1dd1105eba2f7af72f322e1e455f2f93c993bee8c8a5f17"}, + {file = "werkzeug-3.0.6.tar.gz", hash = "sha256:a8dd59d4de28ca70471a34cba79bed5f7ef2e036a76b3ab0835474246eb41f8d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "zipp" +version = "3.20.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.10\"" +files = [ + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.8" -content-hash = "8551b871abee465e23fb0966d51f2c155fd257b55bdcb0c02d095de19f92f358" +content-hash = "408c09927de31c5e442b0d4209e32248d3fb0f7e0d4d72bb8952deb4233a7069" diff --git a/pyproject.toml b/pyproject.toml index 008bd7ad..28319fde 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,6 +47,7 @@ pytest-asyncio = "^0.23.5" python-dateutil = "^2.9.0" types-python-dateutil = "^2.9.0.20240316" ruff = "==0.11.5" +flask = "<3.0.0" [tool.pytest.ini_options] testpaths = [ "tests" ] diff --git a/reference.md b/reference.md index 8c298141..bee9fd20 100644 --- a/reference.md +++ b/reference.md @@ -29693,6 +29693,118 @@ client.hris.webhook_receivers.create( + + + + +## Remediation +
client.remediation.enable_assurance(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+Enable the Merge Assurance agent to proactively manage the lifecycle of your Merge credentials, preventing outages caused by token expiration. When enabled, this launches a lightweight background thread that periodically checks for and refreshes expiring account_tokens. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge import MergeClient +import atexit +import logging + +# Define optional callbacks for logging/monitoring +def on_success(token: str) -> None: + logging.info(f"[Merge Assurance] Successfully refreshed token: {token}") + +def on_failure(token: str, exc: Exception) -> None: + logging.error(f"[Merge Assurance] Failed to refresh token: {token}. Error: {exc}") + +# Initialize the client +client = MergeClient(api_key="") + +# Enable the agent and get the handle +assurance_agent = client.remediation.enable_assurance( + on_success=on_success, + on_failure=on_failure, + check_interval_seconds=1800, + expiry_threshold_days=14 +) + +# CRITICAL: Register the shutdown hook to ensure a clean exit +atexit.register(assurance_agent.shutdown) +``` +
+
+
+
+ +⚙️ Parameters +
+
+ +
+
+ +on_success: typing.Optional[typing.Callable[[str], None]] — A callback function executed upon a successful token refresh. Receives the account_token as an argument. + +
+
+ +
+
+ +on_failure: typing.Optional[typing.Callable[[str, Exception], None]] — A callback function executed after all refresh retries for a token have failed. Receives the account_token and the final Exception as arguments. + +
+
+ +
+
+ +check_interval_seconds: typing.Optional[int] — The interval, in seconds, at which the agent checks for expiring tokens. Defaults to 3600. + +
+
+ +
+
+ +expiry_threshold_days: typing.Optional[int] — The time window, in days, before expiration that a token is considered eligible for a refresh. Defaults to 30. + +
+
+ +
+
+ +🔩 Agent Handle +
+
+ +
+
+The enable_assurance() method returns a handle to the agent. This handle has a .shutdown() method that must be called during your application's exit sequence to ensure the background thread is terminated cleanly, as shown in the usage example with atexit. +
+
+
+
+
diff --git a/src/merge/__init__.py b/src/merge/__init__.py index 3a9ef08f..b2ec73aa 100644 --- a/src/merge/__init__.py +++ b/src/merge/__init__.py @@ -6,6 +6,7 @@ from importlib import import_module if typing.TYPE_CHECKING: + from . import remediation from .resources import accounting, ats, crm, filestorage, hris, ticketing from .client import AsyncMerge, Merge from .environment import MergeEnvironment @@ -21,6 +22,7 @@ "filestorage": ".resources", "hris": ".resources", "ticketing": ".resources", + "remediation": ".remediation", } @@ -54,4 +56,5 @@ def __dir__(): "filestorage", "hris", "ticketing", + "remediation", ] diff --git a/src/merge/client.py b/src/merge/client.py index f421282c..2a2e487b 100644 --- a/src/merge/client.py +++ b/src/merge/client.py @@ -9,6 +9,7 @@ from .environment import MergeEnvironment if typing.TYPE_CHECKING: + from .remediation.client import AsyncRemediationClient, RemediationClient from .resources.accounting.client import AccountingClient, AsyncAccountingClient from .resources.ats.client import AsyncAtsClient, AtsClient from .resources.crm.client import AsyncCrmClient, CrmClient @@ -16,7 +17,6 @@ from .resources.hris.client import AsyncHrisClient, HrisClient from .resources.ticketing.client import AsyncTicketingClient, TicketingClient - class Merge: """ Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configuration that will propagate to these functions. @@ -92,6 +92,7 @@ def __init__( self._hris: typing.Optional[HrisClient] = None self._ticketing: typing.Optional[TicketingClient] = None self._accounting: typing.Optional[AccountingClient] = None + self._remediation: typing.Optional[RemediationClient] = None @property def ats(self): @@ -141,6 +142,23 @@ def accounting(self): self._accounting = AccountingClient(client_wrapper=self._client_wrapper) return self._accounting + @property + def remediation(self) -> "RemediationClient": + """ + Access the Merge Remediation client. + + This client provides access to native SDK features for proactively + managing and remediating common integration issues, such as + credential lifecycle management. + """ + if self._remediation is None: + # Note the lazy, inline import, matching their style exactly. + from .remediation.client import RemediationClient # noqa: E402 + + self._remediation = RemediationClient(client_wrapper=self._client_wrapper) + return self._remediation + + class AsyncMerge: """ @@ -217,6 +235,7 @@ def __init__( self._hris: typing.Optional[AsyncHrisClient] = None self._ticketing: typing.Optional[AsyncTicketingClient] = None self._accounting: typing.Optional[AsyncAccountingClient] = None + self._remediation: typing.Optional["AsyncRemediationClient"] = None @property def ats(self): @@ -266,6 +285,13 @@ def accounting(self): self._accounting = AsyncAccountingClient(client_wrapper=self._client_wrapper) return self._accounting + @property + def remediation(self) -> "AsyncRemediationClient": + if self._remediation is None: + from .remediation.client import AsyncRemediationClient # noqa: E402 + self._remediation = AsyncRemediationClient(client_wrapper=self._client_wrapper) + return self._remediation + def _get_base_url(*, base_url: typing.Optional[str] = None, environment: MergeEnvironment) -> str: if base_url is not None: diff --git a/src/merge/remediation/__init__.py b/src/merge/remediation/__init__.py new file mode 100644 index 00000000..1ec393ba --- /dev/null +++ b/src/merge/remediation/__init__.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern. It is not intended for manual modification. + +from .agent import AssuranceAgent +from .client import AsyncRemediationClient, RemediationClient +from .errors import RefreshFailureError + +__all__ = ["AssuranceAgent", "RemediationClient", "AsyncRemediationClient", "RefreshFailureError"] diff --git a/src/merge/remediation/agent.py b/src/merge/remediation/agent.py new file mode 100644 index 00000000..dbf686cc --- /dev/null +++ b/src/merge/remediation/agent.py @@ -0,0 +1,233 @@ +# This file was auto-generated by Fern. It is not intended for manual modification. + +from __future__ import annotations + +import json +import logging +import random +import threading +import time +import typing +from types import TracebackType + +import httpx +from ..core.api_error import ApiError +from ..core.client_wrapper import SyncClientWrapper +from .errors import RefreshFailureError + +if typing.TYPE_CHECKING: + from .client import OnFailureCallback, OnSuccessCallback + +# Set up a logger for this module +logger = logging.getLogger(__name__) + + +class JsonLogFormatter(logging.Formatter): + """Custom formatter to output logs in JSON format.""" + + def format(self, record: logging.LogRecord) -> str: + log_record: typing.Dict[str, typing.Any] = { + "timestamp": self.formatTime(record, self.datefmt), + "level": record.levelname, + "name": record.name, + } + if isinstance(record.msg, dict): + log_record.update(typing.cast(typing.Dict[str, typing.Any], record.msg)) + else: + log_record["message"] = record.getMessage() + + if record.exc_info: + log_record["exc_info"] = self.formatException(record.exc_info) + + return json.dumps(log_record) + + +class AssuranceAgent: + """ + The core implementation of the Merge Assurance background agent. + Manages the lifecycle of monitoring and remediating credentials. + """ + + def __init__( + self, + *, + client_wrapper: SyncClientWrapper, + on_success: typing.Optional[OnSuccessCallback] = None, + on_failure: typing.Optional[OnFailureCallback] = None, + check_interval_seconds: int, + expiry_threshold_days: int, + # The base_url is injected for E2E testing to point to the mock server. + # In production, the client_wrapper's default base_url would be used. + base_url: typing.Optional[str] = None, + ) -> None: + self._client_wrapper = client_wrapper + self._on_success = on_success + self._on_failure = on_failure + self._check_interval_seconds = check_interval_seconds + self._expiry_threshold_seconds = expiry_threshold_days * 86400 + self._base_url = base_url or self._client_wrapper.get_base_url() + + self._timer: typing.Optional[threading.Timer] = None + self._stop_event = threading.Event() + self._running = False + self._processing_lock = threading.Lock() + # Add a set to track tokens that have definitively failed. + self._failed_tokens: typing.Set[str] = set() + + def is_running(self) -> bool: + return self._running + + def start(self) -> None: + """Starts the agent's monitoring loop in a background thread.""" + log_data = { + "message": "Assurance Agent starting.", + "component": "AssuranceAgent", + "check_interval_seconds": self._check_interval_seconds, + } + logger.info(log_data) + self._running = True + self._schedule_next_run() + + def _schedule_next_run(self) -> None: + """Schedules the next execution of the check cycle.""" + if not self._stop_event.is_set(): + self._timer = threading.Timer(self._check_interval_seconds, self._run_check_cycle) + self._timer.daemon = True + self._timer.start() + + def _run_check_cycle(self) -> None: + """The main workhorse method, executed periodically by the timer.""" + if self._stop_event.is_set() or not self._processing_lock.acquire(blocking=False): + if not self._stop_event.is_set(): + logger.warning({"message": "Skipping check cycle, previous cycle still running.", "component": "AssuranceAgent"}) + return + + try: + logger.info({"message": "Assurance Agent running check cycle.", "component": "AssuranceAgent"}) + self._check_and_remediate_credentials() + except Exception as e: + logger.error({"message": "Unhandled exception in check cycle.", "error": str(e), "component": "AssuranceAgent"}) + finally: + self._processing_lock.release() + if not self._stop_event.is_set(): + self._schedule_next_run() + + def _check_and_remediate_credentials(self) -> None: + """Fetches credentials from the (mock) API and triggers remediation.""" + try: + # In a real implementation, this endpoint would provide credential expiry info. + # For this POC, we hit our mock server. + response = self._client_wrapper.httpx_client.request( + "/api/v1/credentials", method="GET", base_url=self._base_url + ) + response.raise_for_status() + credentials = typing.cast(typing.Dict[str, typing.Any], response.json()) + except ApiError as e: + logger.error({"message": "Failed to fetch credentials for checking.", "error": str(e), "component": "AssuranceAgent"}) + return + + now = time.time() + for token, details in credentials.items(): + if details.get("refreshed", False): + continue + + # Skip tokens that we've already determined are unrecoverable. + if token in self._failed_tokens: + logger.debug({"message": "Skipping token marked as failed.", "component": "AssuranceAgent", "account_token": token}) + continue + + expires_at = details.get("expires_at", 0) + time_to_expiry = expires_at - now + + if 0 < time_to_expiry <= self._expiry_threshold_seconds: + log_data = { + "message": "Expiring token detected. Attempting refresh.", + "component": "AssuranceAgent", + "account_token": token, + "expires_in_days": round(time_to_expiry / 86400, 1), + } + logger.warning(log_data) + threading.Thread(target=self._attempt_refresh_with_retries, args=(token,)).start() + + def _attempt_refresh_with_retries(self, account_token: str) -> None: + """Attempts to refresh a token with exponential backoff and jitter via a real API call.""" + max_retries = 5 + base_delay_seconds = 5 + last_exception: typing.Optional[Exception] = None + + for attempt in range(max_retries): + try: + response = self._client_wrapper.httpx_client.request( + "/api/v1/refresh-token", + method="POST", + json={"account_token": account_token}, + base_url=self._base_url, + ) + response.raise_for_status() + logger.info({"message": "Successfully refreshed token.", "component": "AssuranceAgent", "account_token": account_token}) + if self._on_success: + self._on_success(account_token) + return + + except httpx.HTTPStatusError as e: + last_exception = e + # Non-retryable errors (e.g., 4xx client errors) should fail immediately. + if 400 <= e.response.status_code < 500: + logger.error({"message": "Non-retryable API error during refresh.", "component": "AssuranceAgent", "account_token": account_token, "status_code": e.response.status_code, "error": str(e.response.text)}) + break + # Retryable errors (e.g., 5xx server errors, timeouts) + log_data = { + "message": "Refresh attempt failed with retryable error. Retrying.", + "component": "AssuranceAgent", + "account_token": account_token, + "attempt": attempt + 1, + "max_attempts": max_retries, + "error": str(e), + } + logger.warning(log_data) + except Exception as e: + # Catch other potential exceptions like network issues + last_exception = e + log_data = { + "message": "Refresh attempt failed with unexpected error. Retrying.", + "component": "AssuranceAgent", + "account_token": account_token, + "attempt": attempt + 1, + "max_attempts": max_retries, + "error": str(e), + } + logger.warning(log_data) + + if attempt < max_retries - 1: + delay = (base_delay_seconds * (2**attempt)) + random.uniform(0, 0.1) + time.sleep(delay) + + final_error = RefreshFailureError(f"Failed to refresh token '{account_token}' after exhausting retries.", last_exception) + logger.error({"message": "All refresh attempts failed for token.", "component": "AssuranceAgent", "account_token": account_token}) + # Add the token to our set of failed tokens so we don't retry it on the next cycle. + self._failed_tokens.add(account_token) + if self._on_failure: + self._on_failure(account_token, final_error) + + def shutdown(self, wait: bool = True, timeout_seconds: typing.Optional[float] = None) -> None: + """Signals the background agent to shut down gracefully.""" + logger.info({"message": "Shutdown signal received.", "component": "AssuranceAgent"}) + self._stop_event.set() + if self._timer: + self._timer.cancel() + if wait: + if not self._processing_lock.acquire(timeout=timeout_seconds or 5.0): + logger.error({"message": "Agent shutdown timed out waiting for processing to complete.", "component": "AssuranceAgent"}) + self._running = False + logger.info({"message": "Assurance Agent stopped.", "component": "AssuranceAgent"}) + + def __enter__(self) -> "AssuranceAgent": + return self + + def __exit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]], + exc_val: typing.Optional[BaseException], + exc_tb: typing.Optional[TracebackType], + ) -> None: + self.shutdown() \ No newline at end of file diff --git a/src/merge/remediation/client.py b/src/merge/remediation/client.py new file mode 100644 index 00000000..098f2980 --- /dev/null +++ b/src/merge/remediation/client.py @@ -0,0 +1,99 @@ +# This file was auto-generated by Fern. It is not intended for manual modification. + +from __future__ import annotations + +import logging +import typing + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from .agent import AssuranceAgent +from .errors import RefreshFailureError + +# Set up a logger for this module +logger = logging.getLogger(__name__) + +# Define the precise shapes of the callback functions for clarity and type safety. +OnSuccessCallback = typing.Callable[[str], None] +"""A callback function executed upon successful credential refresh.""" + +OnFailureCallback = typing.Callable[[str, RefreshFailureError], None] +"""A callback function executed upon final failure of a credential refresh.""" + + +class RemediationClient: + """ + Client for managing Merge's native SDK remediation features. + Accessed via `merge_client.remediation`. + """ + + def __init__(self, *, client_wrapper: SyncClientWrapper) -> None: + self._client_wrapper = client_wrapper + self._active_agent: typing.Optional[AssuranceAgent] = None + + def enable_assurance( + self, + *, + on_success: typing.Optional[OnSuccessCallback] = None, + on_failure: typing.Optional[OnFailureCallback] = None, + check_interval_seconds: int = 3600, + expiry_threshold_days: int = 30, + )-> "AssuranceAgent": + """ + Configures and activates the "Merge Assurance" agent. + + This method initializes and starts a background agent within the + application's process that proactively monitors and refreshes expiring + credentials (account tokens) before they can cause an outage. + + Parameters: + on_success: Optional callback executed when a token is successfully refreshed. + on_failure: Optional callback executed when a token refresh fails after all retries. + check_interval_seconds: The interval in seconds for checking token expiry. Defaults to 1 hour. + expiry_threshold_days: The window in days to consider a token "expiring". Defaults to 30 days. + + Returns: + An instance of the running AssuranceAgent, which provides a `shutdown()` method + for graceful termination. + """ + if self._active_agent and self._active_agent.is_running(): + logger.warning("Assurance Agent is already running. Returning existing instance.") + return self._active_agent + + if check_interval_seconds <= 0 or expiry_threshold_days <= 0: + raise ValueError("Intervals and thresholds must be positive values.") + + logger.info({"message": "Enabling Merge Assurance Agent.", "component": "AssuranceAgent"}) + agent = AssuranceAgent( + client_wrapper=self._client_wrapper, + on_success=on_success, + on_failure=on_failure, + check_interval_seconds=check_interval_seconds, + expiry_threshold_days=expiry_threshold_days + ) + agent.start() + self._active_agent = agent + return agent + + +class AsyncRemediationClient: + """ + Async client for managing Merge's native SDK remediation features. + Accessed via `async_merge_client.remediation`. + """ + + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def enable_assurance( + self, + *, + on_success: typing.Optional[OnSuccessCallback] = None, + on_failure: typing.Optional[OnFailureCallback] = None, + check_interval_seconds: int = 3600, + expiry_threshold_days: int = 30, + ) -> "AssuranceAgent": + """ + Configures and activates the async "Merge Assurance" agent. + """ + logger.warning("AsyncRemediationClient is a placeholder and not fully implemented.") + raise NotImplementedError("Async Assurance Agent is not yet implemented.") \ No newline at end of file diff --git a/src/merge/remediation/errors.py b/src/merge/remediation/errors.py new file mode 100644 index 00000000..9f99e61d --- /dev/null +++ b/src/merge/remediation/errors.py @@ -0,0 +1,16 @@ +# This file was auto-generated by Fern. It is not intended for manual modification. + +from __future__ import annotations + +import typing + + +class RefreshFailureError(Exception): + """ + Raised by the Assurance Agent when a credential refresh attempt fails definitively + after exhausting all retry mechanisms. + """ + + def __init__(self, message: str, original_exception: typing.Optional[Exception] = None) -> None: + super().__init__(message) + self.original_exception = original_exception \ No newline at end of file diff --git a/tests/integration/test_filestorage.py b/tests/integration/test_filestorage.py index 4acaa081..bce8670c 100644 --- a/tests/integration/test_filestorage.py +++ b/tests/integration/test_filestorage.py @@ -1,5 +1,7 @@ import os + import pytest + from merge import Merge from merge.resources.filestorage.types.permission import Permission diff --git a/tests/remediation/conftest.py b/tests/remediation/conftest.py new file mode 100644 index 00000000..2b48e572 --- /dev/null +++ b/tests/remediation/conftest.py @@ -0,0 +1,9 @@ +from unittest.mock import MagicMock + +import pytest + + +@pytest.fixture(scope="function") +def mock_client_wrapper() -> MagicMock: + """Provides a mock SyncClientWrapper for tests.""" + return MagicMock() diff --git a/tests/remediation/test_agent.py b/tests/remediation/test_agent.py new file mode 100644 index 00000000..a9eace09 --- /dev/null +++ b/tests/remediation/test_agent.py @@ -0,0 +1,140 @@ +# This file contains professional-grade, strictly-typed unit tests for the AssuranceAgent. + +from __future__ import annotations + +import unittest +from unittest.mock import ANY, MagicMock, patch + +import httpx + +from merge.remediation.agent import AssuranceAgent +from merge.remediation.errors import RefreshFailureError + + +class TestAssuranceAgent(unittest.TestCase): + """A suite of unit tests for the AssuranceAgent.""" + + mock_client_wrapper: MagicMock + mock_on_success: MagicMock + mock_on_failure: MagicMock + agent: AssuranceAgent + + def setUp(self) -> None: + """Set up a fresh agent instance and mocks before each test.""" + self.mock_client_wrapper = MagicMock() + self.mock_on_success = MagicMock() + self.mock_on_failure = MagicMock() + self.agent = AssuranceAgent( + client_wrapper=self.mock_client_wrapper, + on_success=self.mock_on_success, + on_failure=self.mock_on_failure, + check_interval_seconds=60, + expiry_threshold_days=30, + ) + + @patch("merge.remediation.agent.threading.Timer") + def test_start_schedules_first_run(self: "TestAssuranceAgent", mock_timer_cls: MagicMock) -> None: + """Verify that agent.start() correctly schedules the first check cycle.""" + self.agent.start() + self.assertTrue(self.agent.is_running()) + + # Assert that a Timer was created with the correct interval and target function. + mock_timer_cls.assert_called_once_with(60, self.agent._run_check_cycle) # type: ignore[misc] + mock_timer_cls.return_value.start.assert_called_once() + + @patch("merge.remediation.agent.threading.Thread") + @patch("merge.remediation.agent.time.time") + def test_check_cycle_identifies_expiring_token( + self: "TestAssuranceAgent", mock_time: MagicMock, mock_thread_cls: MagicMock + ) -> None: + """Ensure the agent correctly identifies a token within the expiry threshold.""" + current_time = 1000000000 # Fixed timestamp for testing + mock_time.return_value = current_time + + # Token expires in 15 days, which is within the 30-day threshold. + expires_at = current_time + (86400 * 15) + mock_store = {"expiring_token": {"expires_at": expires_at, "refreshed": False}} + + mock_response = MagicMock() + mock_response.json.return_value = mock_store + self.mock_client_wrapper.httpx_client.request.return_value = mock_response + + # We call the protected method directly to isolate this unit of logic. + self.agent._check_and_remediate_credentials() # type: ignore[misc] + + # It should have detected the token and spawned a thread to remediate it. + mock_thread_cls.assert_called_once_with( + target=self.agent._attempt_refresh_with_retries, args=("expiring_token",) # type: ignore[misc] + ) + + @patch("merge.remediation.agent.time.sleep") + def test_successful_refresh_invokes_on_success_callback( + self: "TestAssuranceAgent", mock_sleep: MagicMock + ) -> None: + """Verify that a successful token refresh triggers the `on_success` callback.""" + mock_response = MagicMock() + mock_response.raise_for_status.return_value = None + self.mock_client_wrapper.httpx_client.request.return_value = mock_response + + # We call the protected method directly to test the refresh logic. + self.agent._attempt_refresh_with_retries("good_token") # type: ignore[misc] + + self.mock_client_wrapper.httpx_client.request.assert_called_once_with( + "/api/v1/refresh-token", + method="POST", + json={"account_token": "good_token"}, + base_url=ANY, + ) + self.mock_on_success.assert_called_once_with("good_token") + self.mock_on_failure.assert_not_called() + + @patch("merge.remediation.agent.time.sleep") + def test_non_retryable_failure_invokes_on_failure_immediately( + self: "TestAssuranceAgent", mock_sleep: MagicMock + ) -> None: + """Test that a non-retryable error fails immediately without retries.""" + mock_response = httpx.Response(status_code=400, json={"error": "Invalid token"}) + mock_error = httpx.HTTPStatusError( + message="Bad Request", request=httpx.Request("POST", "/api/v1/refresh-token"), response=mock_response + ) + self.mock_client_wrapper.httpx_client.request.side_effect = mock_error + self.agent._attempt_refresh_with_retries("bad_token") # type: ignore[misc] + + # The API should only be called once. + self.mock_client_wrapper.httpx_client.request.assert_called_once_with( + "/api/v1/refresh-token", + method="POST", + json={"account_token": "bad_token"}, + base_url=ANY, + ) + self.mock_on_success.assert_not_called() + + # The failure callback should be invoked with a RefreshFailureError. + self.mock_on_failure.assert_called_once() + self.assertIsInstance(self.mock_on_failure.call_args[0][1], RefreshFailureError) + + @patch("merge.remediation.agent.time.sleep") + def test_retryable_failure_exhausts_retries_and_invokes_on_failure( + self: "TestAssuranceAgent", mock_sleep: MagicMock + ) -> None: + """Test that a retryable error attempts to refresh 5 times before failing.""" + self.mock_client_wrapper.httpx_client.request.side_effect = ConnectionError("flaky connection") + self.agent._attempt_refresh_with_retries("flaky_token") # type: ignore[misc] + + # It should have been called 5 times (the configured max_retries). + self.assertEqual(self.mock_client_wrapper.httpx_client.request.call_count, 5) + self.mock_on_success.assert_not_called() + self.mock_on_failure.assert_called_once_with("flaky_token", ANY) + + @patch("merge.remediation.agent.threading.Timer") + def test_shutdown_cancels_active_timer(self: "TestAssuranceAgent", mock_timer_cls: MagicMock) -> None: + """Verify the shutdown method correctly cancels the agent's timer.""" + # Simulate an active timer on the agent instance. + mock_timer_instance = MagicMock() + self.agent._timer = mock_timer_instance # type: ignore[misc] + + self.agent.shutdown() + + self.assertFalse(self.agent.is_running()) + # The key assertion: the timer's cancel() method was called. + mock_timer_instance.cancel.assert_called_once()