diff --git a/libs/3rdparty/python/BUILD b/libs/3rdparty/python/BUILD index b4793eb8..076e7ccb 100644 --- a/libs/3rdparty/python/BUILD +++ b/libs/3rdparty/python/BUILD @@ -39,6 +39,11 @@ python_requirements( source="rich-requirements.txt", ) +python_requirements( + name="a2a", + source="a2a-requirements.txt", +) + python_requirements( name="mcp", source="mcp-requirements.txt", diff --git a/libs/3rdparty/python/a2a-requirements.txt b/libs/3rdparty/python/a2a-requirements.txt new file mode 100644 index 00000000..cae43e35 --- /dev/null +++ b/libs/3rdparty/python/a2a-requirements.txt @@ -0,0 +1 @@ +a2a-sdk>=0.3.0 diff --git a/libs/next_gen_ui_a2a/BUILD b/libs/next_gen_ui_a2a/BUILD new file mode 100644 index 00000000..32ee93ec --- /dev/null +++ b/libs/next_gen_ui_a2a/BUILD @@ -0,0 +1,44 @@ +# This target sets the metadata for all the Python non-test files in this directory. +python_sources( + name="lib", + dependencies=[ + "libs/3rdparty/python:a2a", + ], +) + +# This target sets the metadata for all the Python test files in this directory. +python_tests( + name="tests", + dependencies=[ + "libs/3rdparty/python:a2a", + ], +) + +# This target allows us to build a `.whl` bdist and a `.tar.gz` sdist by auto-generating +# `setup.py`. See https://www.pantsbuild.org/docs/python-distributions. +# +# Because this target has no source code, Pants cannot infer dependencies. We depend on `:lib`, +# which means we'll include all the non-test Python files in this directory, and any of +# their dependencies. 
+python_distribution( + name="dist", + dependencies=[ + ":lib", + ], + provides=python_artifact( + name="next_gen_ui_a2a", + version=env("VERSION"), + license="Apache-2.0", + description="A2A integration for Next Gen UI Agent", + long_description_content_type="text/markdown", + classifiers=[ + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "License :: OSI Approved :: Apache Software License", + ], + url="https://github.com/RedHat-UX/next-gen-ui-agent", + ), + long_description_path="libs/next_gen_ui_a2a/README.md", + generate_setup=True, +) diff --git a/libs/next_gen_ui_a2a/Containerfile b/libs/next_gen_ui_a2a/Containerfile new file mode 100644 index 00000000..b5ccde2c --- /dev/null +++ b/libs/next_gen_ui_a2a/Containerfile @@ -0,0 +1,30 @@ +FROM registry.access.redhat.com/ubi9/python-312 + +# Set work directory +WORKDIR /opt/app-root/ + +# Install dependencies +RUN pip install a2a-sdk a2a-sdk[http-server] uvicorn langchain_openai + + +# Copy Python Project Files (Container context must be the `python` directory) +COPY . /opt/app-root + +USER root + +# Install next_gen_ui dependencies +RUN pip install \ + /opt/ngui-dist/next_gen_ui_agent*.whl \ + /opt/ngui-dist/next_gen_ui_rhds*.whl + + +# Allow non-root user to access the everything in app-root +RUN chgrp -R root /opt/app-root/ && chmod -R g+rwx /opt/app-root/ + +# Expose default port (change if needed) +EXPOSE 9999 + +USER 1001 + +# Run the agent +CMD python . \ No newline at end of file diff --git a/libs/next_gen_ui_a2a/README.md b/libs/next_gen_ui_a2a/README.md new file mode 100644 index 00000000..0829cf23 --- /dev/null +++ b/libs/next_gen_ui_a2a/README.md @@ -0,0 +1,222 @@ +# Next Gen UI Agent A2A Protocol Integration + +[A2A Protocol](https://a2a-protocol.org/) provides standard how to communicate with agent +and provides interoparability by client SDKs in different languages. + +This package provides resp. helps you build: +1. 
Standard A2A API to the Next Gen UI agent +2. HTTP Server to run the A2A API and execute the agent +3. Docker image + +To interact with agent via A2A protocol use any A2A client implemntation. + +## Installation + +```sh +pip install -U next_gen_ui_a2a +``` + +## Example + +### Run A2A server with Next Gen UI agent + +```py +import uvicorn +from a2a.server.apps import A2AStarletteApplication +from a2a.server.request_handlers import DefaultRequestHandler +from a2a.server.tasks import InMemoryTaskStore +from langchain_openai import ChatOpenAI + +from next_gen_ui_a2a.agent_card import card +from next_gen_ui_a2a.agent_executor import NextGenUIAgentExecutor +from next_gen_ui_agent.model import LangChainModelInference +from next_gen_ui_agent.types import AgentConfig + +if not os.environ.get("OPENAI_API_KEY"): + os.environ["OPENAI_API_KEY"] = "ollama" +model = os.getenv("INFERENCE_MODEL", "llama3.2") +base_url = os.getenv("OPEN_API_URL", "http://localhost:11434/v1") + +# Create Chat API used by next_gen_ui agent +llm = ChatOpenAI(model=model, base_url=base_url) +inference = LangChainModelInference(llm) +config = AgentConfig(inference=inference) + +request_handler = DefaultRequestHandler( + agent_executor=NextGenUIAgentExecutor(config), + task_store=InMemoryTaskStore(), +) + +server = A2AStarletteApplication( + agent_card=card, + http_handler=request_handler, +) + +uvicorn.run(server.build(), host="0.0.0.0", port=9999) +``` + +### Run A2A client + +```py +import logging +from uuid import uuid4 + +import httpx +from a2a.client import A2ACardResolver, A2AClient +from a2a.types import ( # SendStreamingMessageRequest, + AgentCard, + Message, + MessageSendParams, + Part, + Role, + SendMessageRequest, + TextPart, +) +from a2a.utils.constants import AGENT_CARD_WELL_KNOWN_PATH + + +async def main() -> None: + # Configure logging to show INFO level messages + logging.basicConfig(level=logging.INFO) + logger = logging.getLogger(__name__) # Get a logger instance + + base_url = 
"http://localhost:9999" + + async with httpx.AsyncClient(timeout=120) as httpx_client: + # Initialize A2ACardResolver + resolver = A2ACardResolver( + httpx_client=httpx_client, + base_url=base_url, + # agent_card_path uses default, extended_agent_card_path also uses default + ) + + # Fetch Public Agent Card and Initialize Client + final_agent_card_to_use: AgentCard | None = None + + try: + logger.info( + f"Attempting to fetch public agent card from: {base_url}{AGENT_CARD_WELL_KNOWN_PATH}" + ) + _public_card = ( + await resolver.get_agent_card() + ) # Fetches from default public path + logger.info("Successfully fetched public agent card:") + logger.info(_public_card.model_dump_json(indent=2, exclude_none=True)) + final_agent_card_to_use = _public_card + logger.info( + "\nUsing PUBLIC agent card for client initialization (default)." + ) + + except Exception as e: + logger.exception("Critical error fetching public agent card") + raise RuntimeError( + "Failed to fetch the public agent card. Cannot continue." 
+ ) from e + + client = A2AClient( + httpx_client=httpx_client, + agent_card=final_agent_card_to_use, + ) + logger.info("A2AClient initialized.") + + movies_data = { + "movie": { + "languages": ["English"], + "year": 1995, + "imdbId": "0114709", + "runtime": 81, + "imdbRating": 8.3, + "movieId": "1", + "countries": ["USA"], + "imdbVotes": 591836, + "title": "Toy Story", + "url": "https://themoviedb.org/movie/862", + "revenue": 373554033, + "tmdbId": "862", + "plot": "A cowboy doll is profoundly threatened and jealous when a new spaceman figure supplants him as top toy in a boy's room.", + "posterUrl": "https://image.tmdb.org/t/p/w440_and_h660_face/uXDfjJbdP4ijW5hWSBrPrlKpxab.jpg", + "released": "2022-11-02", + "trailerUrl": "https://www.youtube.com/watch?v=v-PjgYDrg70", + "budget": 30000000, + }, + "actors": ["Jim Varney", "Tim Allen", "Tom Hanks", "Don Rickles"], + } + + message = Message( + role=Role.user, + parts=[ + Part( + root=TextPart( + text="Tell me details about Toy Story", + metadata={ + "data": movies_data, + "type": "search_movie", + }, + ) + ), + # Part(root=DataPart(data=movies_data)), + ], + message_id=str(uuid4()), + ) + request = SendMessageRequest( + id=str(uuid4()), params=MessageSendParams(message=message) + ) + + response = await client.send_message(request) + logger.info("Execution finished.") + print(response.model_dump(mode="json", exclude_none=True)) + + # streaming_request = SendStreamingMessageRequest( + # id=str(uuid4()), params=MessageSendParams(message=message) + # ) + # stream_response = client.send_message_streaming(streaming_request) + # async for chunk in stream_response: + # print(chunk.model_dump(mode="json", exclude_none=True)) + + +if __name__ == "__main__": + import asyncio + + asyncio.run(main()) +``` + + +## Build Container Image + +Agent can also be built using a container file. + +1. Build project + +```sh +pants package :: +``` + +2. 
Navigate to the `libs/next_gen_ui_a2a` directory: + +```sh +cd libs/next_gen_ui_a2a +``` + +3. Build the container image + +```sh +export PROJ_DIST_DIR=$(realpath ../../dist) +podman build . -v $PROJ_DIST_DIR:/opt/ngui-dist:ro,z -t ngui-a2a-server +``` + +> [!Tip] +> Podman is a drop-in replacement for `docker`, which can also be used in these commands. + +4. Run your container + +```bash +podman run --rm -p 9999:9999 \ + -e INFERENCE_MODEL=llama3.2 \ + -e OPEN_API_URL=http://host.containers.internal:11434/v1 \ + ngui-a2a-server +``` +5. Validate the A2A server
"llama3.2") + base_url = os.getenv("OPEN_API_URL", "http://localhost:11434/v1") + + logger.info( + "Starting Next Gen UI A2A Server. base_url=%s, model=%s", base_url, model + ) + + llm = ChatOpenAI(model=model, base_url=base_url) + inference = LangChainModelInference(llm) + config = AgentConfig(inference=inference) + + request_handler = DefaultRequestHandler( + agent_executor=NextGenUIAgentExecutor(config), + task_store=InMemoryTaskStore(), + ) + + server = A2AStarletteApplication( + agent_card=card, + http_handler=request_handler, + ) + + uvicorn.run(server.build(), host="0.0.0.0", port=9999) diff --git a/libs/next_gen_ui_a2a/agent_card.py b/libs/next_gen_ui_a2a/agent_card.py new file mode 100644 index 00000000..75f09864 --- /dev/null +++ b/libs/next_gen_ui_a2a/agent_card.py @@ -0,0 +1,24 @@ +from a2a.types import AgentCapabilities, AgentCard, AgentSkill + +skill = AgentSkill( + id="generate_ui_components", + name="Generates UI component", + description="Returns generated UI component", + tags=["ui"], + examples=[ + "First message TextPart should be user prompt. 
backend data could be passed as 'data' field in metadata or following DataParts" + ], +) + +# This will be the public-facing agent card +card = AgentCard( + name="Next Gen UI Agent", + description="Generates UI component based on structured input data and user prompt", + url="http://localhost:9999/", + version="1.0.0", + default_input_modes=["text"], + default_output_modes=["text"], + capabilities=AgentCapabilities(streaming=True), + skills=[skill], + supports_authenticated_extended_card=False, +) diff --git a/libs/next_gen_ui_a2a/agent_executor.py b/libs/next_gen_ui_a2a/agent_executor.py new file mode 100644 index 00000000..a398dc07 --- /dev/null +++ b/libs/next_gen_ui_a2a/agent_executor.py @@ -0,0 +1,78 @@ +import json +from uuid import uuid4 + +from a2a.server.agent_execution import AgentExecutor, RequestContext +from a2a.server.events import EventQueue +from a2a.types import DataPart, Message, TextPart +from a2a.utils import new_agent_text_message +from next_gen_ui_agent import AgentInput, InputData, NextGenUIAgent +from next_gen_ui_agent.types import AgentConfig + + +class NextGenUIAgentExecutor(AgentExecutor): + """Next Gen UI Agent Executor. 
AgentConfig is required""" + + def __init__(self, config: AgentConfig): + self.ngui_agent = NextGenUIAgent(config) + + def _data_selection(self, message: Message) -> tuple[str, list[InputData]]: + """Get data from the message parts.""" + input_data = [] + user_prompt = "" + + for p in message.parts: + id = uuid4().hex + part_root = p.root + if not user_prompt and isinstance(part_root, TextPart): + user_prompt = part_root.text + # Try to get data from metadata.datas + if part_root.metadata and part_root.metadata.get("data"): + input_data.append( + InputData( + id=id, + data=json.dumps(part_root.metadata["data"]), + type=str(part_root.metadata.get("type")), + ) + ) + + if isinstance(part_root, DataPart): + input_data.append(InputData(id=id, data=json.dumps(part_root.data))) + elif not user_prompt and isinstance(part_root, TextPart): + input_data.append(InputData(id=id, data=part_root.text)) + + return user_prompt, input_data + + async def execute( + self, + context: RequestContext, + event_queue: EventQueue, + ) -> None: + if not context.message: + raise ValueError("No message provided") + component_system = context.metadata.get("component_system", "json") + + user_prompt, input_data = self._data_selection(context.message) + if len(input_data) == 0: + # TODO: Throw a better error or map it to the right params error A2A error + raise ValueError( + "No input data gathered from either metadata of TextPart or DataPart" + ) + input = AgentInput( + user_prompt=user_prompt, + input_data=input_data, + ) + + components = await self.ngui_agent.component_selection(input=input) + components_data = self.ngui_agent.data_transformation( + input_data=input_data, + components=components, + ) + renditions = self.ngui_agent.design_system_handler( + components=components_data, + component_system=component_system, + ) + + await event_queue.enqueue_event(new_agent_text_message(renditions[0].content)) + + async def cancel(self, context: RequestContext, event_queue: EventQueue) -> None: + 
raise Exception("cancel not supported") diff --git a/libs/next_gen_ui_a2a/agent_executor_test.py b/libs/next_gen_ui_a2a/agent_executor_test.py new file mode 100644 index 00000000..119707b2 --- /dev/null +++ b/libs/next_gen_ui_a2a/agent_executor_test.py @@ -0,0 +1,138 @@ +from uuid import uuid4 + +import pytest +from a2a.server.agent_execution import SimpleRequestContextBuilder +from a2a.server.events import EventQueue +from a2a.types import DataPart, Message, MessageSendParams, Part, Role, TextPart +from langchain_core.language_models import FakeMessagesListChatModel +from langchain_core.messages import AIMessage +from next_gen_ui_a2a.agent_executor import NextGenUIAgentExecutor +from next_gen_ui_agent.data_transform.types import ComponentDataOneCard +from next_gen_ui_agent.model import LangChainModelInference + +USER_PROMPT = "Tell me brief details of Toy Story" +movies_data_obj = { + "movie": { + "languages": ["English"], + "year": 1995, + "imdbId": "0114709", + "runtime": 81, + "imdbRating": 8.3, + "movieId": "1", + "countries": ["USA"], + "imdbVotes": 591836, + "title": "Toy Story", + "url": "https://themoviedb.org/movie/862", + "revenue": 373554033, + "tmdbId": "862", + "plot": "A cowboy doll is profoundly threatened and jealous when a new spaceman figure supplants him as top toy in a boy's room.", + "posterUrl": "https://image.tmdb.org/t/p/w440_and_h660_face/uXDfjJbdP4ijW5hWSBrPrlKpxab.jpg", + "released": "2022-11-02", + "trailerUrl": "https://www.youtube.com/watch?v=v-PjgYDrg70", + "budget": 30000000, + }, + "actors": ["Jim Varney", "Tim Allen", "Tom Hanks", "Don Rickles"], +} +LLM_RESPONSE = """ + { + "title": "Toy Story Details", + "reasonForTheComponentSelection": "One item available in the data", + "confidenceScore": "100%", + "component": "one-card", + "fields" : [ + {"name":"Title","data_path":"movie.title"}, + {"name":"Year","data_path":"movie.year"}, + {"name":"IMDB Rating","data_path":"movie.imdbRating"}, + {"name":"Release 
Date","data_path":"movie.released"} + ] + } + """ + + +@pytest.mark.asyncio +async def test_agent_executor_one_message_and_metadata() -> None: + msg = AIMessage(content=LLM_RESPONSE) + llm = FakeMessagesListChatModel(responses=[msg]) + inference = LangChainModelInference(llm) + + executor = NextGenUIAgentExecutor({"inference": inference}) + + message = Message( + role=Role.user, + parts=[ + Part( + root=TextPart( + text="Tell me details about Toy Story", + metadata={ + "data": movies_data_obj, + "type": "search_movie", + }, + ) + ), + ], + message_id=str(uuid4()), + ) + + context = await SimpleRequestContextBuilder().build( + params=MessageSendParams(message=message) + ) + + event_queue = EventQueue() + await executor.execute(context, event_queue) + + event = await event_queue.dequeue_event(no_wait=True) + if isinstance(event, Message): + assert len(event.parts) == 1 + part_root = event.parts[0].root + if isinstance(part_root, TextPart): + # print(part_root.text) + c = ComponentDataOneCard.model_validate_json(part_root.text) + assert "one-card" == c.component + assert "Toy Story Details" == c.title + else: + raise Exception("message part is not TextPart") + else: + raise Exception("event is not message") + + +@pytest.mark.asyncio +async def test_agent_executor_two_messages() -> None: + msg = AIMessage(content=LLM_RESPONSE) + llm = FakeMessagesListChatModel(responses=[msg]) + inference = LangChainModelInference(llm) + + executor = NextGenUIAgentExecutor({"inference": inference}) + + message = Message( + role=Role.user, + parts=[ + Part( + root=TextPart( + text="Tell me details about Toy Story", + ) + ), + Part(root=DataPart(data=movies_data_obj)), + ], + message_id=str(uuid4()), + ) + + context = await SimpleRequestContextBuilder().build( + params=MessageSendParams(message=message) + ) + + event_queue = EventQueue() + await executor.execute(context, event_queue) + + event = await event_queue.dequeue_event(no_wait=True) + if isinstance(event, Message): + assert 
len(event.parts) == 1 + part_root = event.parts[0].root + if isinstance(part_root, TextPart): + # print(part_root.text) + c = ComponentDataOneCard.model_validate_json(part_root.text) + assert "one-card" == c.component + assert "Toy Story Details" == c.title + else: + raise Exception("message part is not TextPart") + else: + raise Exception("event is not message") diff --git a/libs/next_gen_ui_a2a/readme_example.py b/libs/next_gen_ui_a2a/readme_example.py new file mode 100644 index 00000000..0c605b1d --- /dev/null +++ b/libs/next_gen_ui_a2a/readme_example.py @@ -0,0 +1,122 @@ +import logging +from uuid import uuid4 + +import httpx +from a2a.client import A2ACardResolver, A2AClient +from a2a.types import ( # SendStreamingMessageRequest, + AgentCard, + Message, + MessageSendParams, + Part, + Role, + SendMessageRequest, + TextPart, +) +from a2a.utils.constants import AGENT_CARD_WELL_KNOWN_PATH + + +async def main() -> None: + # Configure logging to show INFO level messages + logging.basicConfig(level=logging.INFO) + logger = logging.getLogger(__name__) # Get a logger instance + + # --8<-- [start:A2ACardResolver] + + base_url = "http://localhost:9999" + + async with httpx.AsyncClient(timeout=120) as httpx_client: + # Initialize A2ACardResolver + resolver = A2ACardResolver( + httpx_client=httpx_client, + base_url=base_url, + # agent_card_path uses default, extended_agent_card_path also uses default + ) + + # Fetch Public Agent Card and Initialize Client + final_agent_card_to_use: AgentCard | None = None + + try: + logger.info( + f"Attempting to fetch public agent card from: {base_url}{AGENT_CARD_WELL_KNOWN_PATH}" + ) + _public_card = ( + await resolver.get_agent_card() + ) # Fetches from default public path + logger.info("Successfully fetched public agent card:") + logger.info(_public_card.model_dump_json(indent=2, exclude_none=True)) + final_agent_card_to_use = _public_card + logger.info( + "\nUsing PUBLIC agent card for client initialization (default)." 
+ ) + + except Exception as e: + logger.exception("Critical error fetching public agent card") + raise RuntimeError( + "Failed to fetch the public agent card. Cannot continue." + ) from e + + client = A2AClient( + httpx_client=httpx_client, + agent_card=final_agent_card_to_use, + ) + logger.info("A2AClient initialized.") + + movies_data = { + "movie": { + "languages": ["English"], + "year": 1995, + "imdbId": "0114709", + "runtime": 81, + "imdbRating": 8.3, + "movieId": "1", + "countries": ["USA"], + "imdbVotes": 591836, + "title": "Toy Story", + "url": "https://themoviedb.org/movie/862", + "revenue": 373554033, + "tmdbId": "862", + "plot": "A cowboy doll is profoundly threatened and jealous when a new spaceman figure supplants him as top toy in a boy's room.", + "posterUrl": "https://image.tmdb.org/t/p/w440_and_h660_face/uXDfjJbdP4ijW5hWSBrPrlKpxab.jpg", + "released": "2022-11-02", + "trailerUrl": "https://www.youtube.com/watch?v=v-PjgYDrg70", + "budget": 30000000, + }, + "actors": ["Jim Varney", "Tim Allen", "Tom Hanks", "Don Rickles"], + } + + message = Message( + role=Role.user, + parts=[ + Part( + root=TextPart( + text="Tell me details about Toy Story", + metadata={ + "data": movies_data, + "type": "search_movie", + }, + ) + ), + # Part(root=DataPart(data=movies_data)), + ], + message_id=str(uuid4()), + ) + request = SendMessageRequest( + id=str(uuid4()), params=MessageSendParams(message=message) + ) + + response = await client.send_message(request) + logger.info("Execution finished.") + print(response.model_dump(mode="json", exclude_none=True)) + + # streaming_request = SendStreamingMessageRequest( + # id=str(uuid4()), params=MessageSendParams(message=message) + # ) + # stream_response = client.send_message_streaming(streaming_request) + # async for chunk in stream_response: + # print(chunk.model_dump(mode="json", exclude_none=True)) + + +if __name__ == "__main__": + import asyncio + + asyncio.run(main()) diff --git a/python-default.lock b/python-default.lock 
index fd565acb..1dd4b840 100644 --- a/python-default.lock +++ b/python-default.lock @@ -9,6 +9,7 @@ // "CPython<3.14,>=3.12" // ], // "generated_with_requirements": [ +// "a2a-sdk>=0.3.0", // "acp-sdk>=0.8.1", // "aiosqlite", // "ansicolors==1.1.8", @@ -74,6 +75,57 @@ "locked_resolves": [ { "locked_requirements": [ + { + "artifacts": [ + { + "algorithm": "sha256", + "hash": "0813b8fd7add427b2b56895cf28cae705303cf6d671b305c0aac69987816e03e", + "url": "https://files.pythonhosted.org/packages/e6/27/9cf8c6de4ae71e9c98ec96b3304449d5d0cd36ec3b95e66b6e7f58a9e571/a2a_sdk-0.3.7-py3-none-any.whl" + }, + { + "algorithm": "sha256", + "hash": "795aa2bd2cfb3c9e8654a1352bf5f75d6cf1205b262b1bf8f4003b5308267ea2", + "url": "https://files.pythonhosted.org/packages/8d/ad/b6ecb58f44459a24f1c260e91304e1ddbb7a8e213f1f82cc4c074f66e9bb/a2a_sdk-0.3.7.tar.gz" + } + ], + "project_name": "a2a-sdk", + "requires_dists": [ + "cryptography>=43.0.0; extra == \"all\"", + "cryptography>=43.0.0; extra == \"encryption\"", + "fastapi>=0.115.2; extra == \"all\"", + "fastapi>=0.115.2; extra == \"http-server\"", + "google-api-core>=1.26.0", + "grpcio-reflection>=1.7.0; extra == \"all\"", + "grpcio-reflection>=1.7.0; extra == \"grpc\"", + "grpcio-tools>=1.60; extra == \"all\"", + "grpcio-tools>=1.60; extra == \"grpc\"", + "grpcio>=1.60; extra == \"all\"", + "grpcio>=1.60; extra == \"grpc\"", + "httpx-sse>=0.4.0", + "httpx>=0.28.1", + "opentelemetry-api>=1.33.0; extra == \"all\"", + "opentelemetry-api>=1.33.0; extra == \"telemetry\"", + "opentelemetry-sdk>=1.33.0; extra == \"all\"", + "opentelemetry-sdk>=1.33.0; extra == \"telemetry\"", + "protobuf>=5.29.5", + "pydantic>=2.11.3", + "sqlalchemy[aiomysql,asyncio]>=2.0.0; extra == \"all\"", + "sqlalchemy[aiomysql,asyncio]>=2.0.0; extra == \"mysql\"", + "sqlalchemy[aiomysql,asyncio]>=2.0.0; extra == \"sql\"", + "sqlalchemy[aiosqlite,asyncio]>=2.0.0; extra == \"all\"", + "sqlalchemy[aiosqlite,asyncio]>=2.0.0; extra == \"sql\"", + 
"sqlalchemy[aiosqlite,asyncio]>=2.0.0; extra == \"sqlite\"", + "sqlalchemy[asyncio,postgresql-asyncpg]>=2.0.0; extra == \"all\"", + "sqlalchemy[asyncio,postgresql-asyncpg]>=2.0.0; extra == \"postgresql\"", + "sqlalchemy[asyncio,postgresql-asyncpg]>=2.0.0; extra == \"sql\"", + "sse-starlette; extra == \"all\"", + "sse-starlette; extra == \"http-server\"", + "starlette; extra == \"all\"", + "starlette; extra == \"http-server\"" + ], + "requires_python": ">=3.10", + "version": "0.3.7" + }, { "artifacts": [ { @@ -2210,6 +2262,96 @@ "requires_python": ">=3.7", "version": "3.1.45" }, + { + "artifacts": [ + { + "algorithm": "sha256", + "hash": "8a2a56c1fef82987a524371f99f3bd0143702fecc670c72e600c1cda6bf8dbb7", + "url": "https://files.pythonhosted.org/packages/14/4b/ead00905132820b623732b175d66354e9d3e69fcf2a5dcdab780664e7896/google_api_core-2.25.1-py3-none-any.whl" + }, + { + "algorithm": "sha256", + "hash": "d2aaa0b13c78c61cb3f4282c464c046e45fbd75755683c9c525e6e8f7ed0a5e8", + "url": "https://files.pythonhosted.org/packages/dc/21/e9d043e88222317afdbdb567165fdbc3b0aad90064c7e0c9eb0ad9955ad8/google_api_core-2.25.1.tar.gz" + } + ], + "project_name": "google-api-core", + "requires_dists": [ + "google-auth<3.0.0,>=2.14.1", + "google-auth[aiohttp]<3.0.0,>=2.35.0; extra == \"async-rest\"", + "googleapis-common-protos<2.0.0,>=1.56.2", + "grpcio-gcp<1.0.0,>=0.2.2; extra == \"grpcgcp\"", + "grpcio-gcp<1.0.0,>=0.2.2; extra == \"grpcio-gcp\"", + "grpcio-status<2.0.0,>=1.33.2; extra == \"grpc\"", + "grpcio-status<2.0.0,>=1.49.1; python_version >= \"3.11\" and extra == \"grpc\"", + "grpcio<2.0.0,>=1.33.2; extra == \"grpc\"", + "grpcio<2.0.0,>=1.49.1; python_version >= \"3.11\" and extra == \"grpc\"", + "proto-plus<2.0.0,>=1.22.3", + "proto-plus<2.0.0,>=1.25.0; python_version >= \"3.13\"", + "protobuf!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<7.0.0,>=3.19.5", + "requests<3.0.0,>=2.18.0" + ], + "requires_python": ">=3.7", + "version": "2.25.1" + }, + { + 
"artifacts": [ + { + "algorithm": "sha256", + "hash": "1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca", + "url": "https://files.pythonhosted.org/packages/17/63/b19553b658a1692443c62bd07e5868adaa0ad746a0751ba62c59568cd45b/google_auth-2.40.3-py2.py3-none-any.whl" + }, + { + "algorithm": "sha256", + "hash": "500c3a29adedeb36ea9cf24b8d10858e152f2412e3ca37829b3fa18e33d63b77", + "url": "https://files.pythonhosted.org/packages/9e/9b/e92ef23b84fa10a64ce4831390b7a4c2e53c0132568d99d4ae61d04c8855/google_auth-2.40.3.tar.gz" + } + ], + "project_name": "google-auth", + "requires_dists": [ + "aiohttp<3.10.0; extra == \"testing\"", + "aiohttp<4.0.0,>=3.6.2; extra == \"aiohttp\"", + "aiohttp<4.0.0,>=3.6.2; extra == \"testing\"", + "aioresponses; extra == \"testing\"", + "cachetools<6.0,>=2.0.0", + "cryptography; extra == \"enterprise-cert\"", + "cryptography<39.0.0; python_version < \"3.8\" and extra == \"pyjwt\"", + "cryptography<39.0.0; python_version < \"3.8\" and extra == \"pyopenssl\"", + "cryptography<39.0.0; python_version < \"3.8\" and extra == \"testing\"", + "cryptography>=38.0.3; extra == \"pyjwt\"", + "cryptography>=38.0.3; extra == \"pyopenssl\"", + "cryptography>=38.0.3; extra == \"testing\"", + "flask; extra == \"testing\"", + "freezegun; extra == \"testing\"", + "grpcio; extra == \"testing\"", + "mock; extra == \"testing\"", + "oauth2client; extra == \"testing\"", + "packaging; extra == \"testing\"", + "packaging; extra == \"urllib3\"", + "pyasn1-modules>=0.2.1", + "pyjwt>=2.0; extra == \"pyjwt\"", + "pyjwt>=2.0; extra == \"testing\"", + "pyopenssl; extra == \"enterprise-cert\"", + "pyopenssl<24.3.0; extra == \"testing\"", + "pyopenssl>=20.0.0; extra == \"pyopenssl\"", + "pyopenssl>=20.0.0; extra == \"testing\"", + "pytest-asyncio; extra == \"testing\"", + "pytest-cov; extra == \"testing\"", + "pytest-localserver; extra == \"testing\"", + "pytest; extra == \"testing\"", + "pyu2f>=0.1.5; extra == \"reauth\"", + "pyu2f>=0.1.5; extra == \"testing\"", 
+ "requests<3.0.0,>=2.20.0; extra == \"aiohttp\"", + "requests<3.0.0,>=2.20.0; extra == \"requests\"", + "requests<3.0.0,>=2.20.0; extra == \"testing\"", + "responses; extra == \"testing\"", + "rsa<5,>=3.1.4", + "urllib3; extra == \"testing\"", + "urllib3; extra == \"urllib3\"" + ], + "requires_python": ">=3.7", + "version": "2.40.3" + }, { "artifacts": [ { @@ -3408,13 +3550,13 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "110767eb83e6da2cc99cfc61958631b5c36624758b52e7af35ec5550ad846cb3", - "url": "https://files.pythonhosted.org/packages/ae/d1/b2b2ea7b443c6b028aca209d2e653256912906900cc146e64c65201211b7/langsmith-0.4.30-py3-none-any.whl" + "hash": "64f340bdead21defe5f4a6ca330c11073e35444989169f669508edf45a19025f", + "url": "https://files.pythonhosted.org/packages/3e/8e/e7a43d907a147e1f87eebdd6737483f9feba52a5d4b20f69d0bd6f2fa22f/langsmith-0.4.31-py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "388fe1060aca6507be41f417c7d4168a92dffe27f28bb6ef8a1bfee4a59f3681", - "url": "https://files.pythonhosted.org/packages/fe/d5/4cc88f246ce615a518a715cd2bf40001d1678ad6805a3706a90570adca8f/langsmith-0.4.30.tar.gz" + "hash": "5fb3729e22bd9a225391936cb9d1080322e6c375bb776514af06b56d6c46ed3e", + "url": "https://files.pythonhosted.org/packages/55/f5/edbdf89a162ee025348b3b2080fb3b88f4a1040a5a186f32d34aca913994/langsmith-0.4.31.tar.gz" } ], "project_name": "langsmith", @@ -3437,19 +3579,19 @@ "zstandard>=0.23.0" ], "requires_python": ">=3.9", - "version": "0.4.30" + "version": "0.4.31" }, { "artifacts": [ { "algorithm": "sha256", - "hash": "f0c8c6bcfa2c9cd9e9fa0304f9a94894d252e7c74f118c37a8f2e4e525b2592b", - "url": "https://files.pythonhosted.org/packages/81/b2/122602255b582fdcf630f8e44b5c9175391abe10be5e2f4db6a7d4173df1/litellm-1.77.3-py3-none-any.whl" + "hash": "66c2bb776f1e19ceddfa977a2bbf7f05e6f26c4b1fec8b2093bd171d842701b8", + "url": 
"https://files.pythonhosted.org/packages/3c/32/90f8587818d146d604ed6eec95f96378363fda06b14817399cc68853383e/litellm-1.77.4-py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "d8f9d674ef4e7673b1af02428fde27de5a8e84ca7268f003902340586aac7d96", - "url": "https://files.pythonhosted.org/packages/92/86/8bfd372d3d437b773b4b81d6da35674a569c10a9b805409257790e3af271/litellm-1.77.3.tar.gz" + "hash": "ce652e10ecf5b36767bfdf58e53b2802e22c3de383b03554e6ee1a4a66fa743d", + "url": "https://files.pythonhosted.org/packages/ab/b7/0d3c6dbcff3064238d123f90ae96764a85352f3f5caab6695a55007fd019/litellm-1.77.4.tar.gz" } ], "project_name": "litellm", @@ -3502,7 +3644,7 @@ "websockets<14.0.0,>=13.1.0; extra == \"proxy\"" ], "requires_python": "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8", - "version": "1.77.3" + "version": "1.77.4" }, { "artifacts": [ @@ -4631,13 +4773,13 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "6374c9bb4f2a371b3583c09786112ba85b006516745689c172a7e28af4d4d1a2", - "url": "https://files.pythonhosted.org/packages/1b/af/d0a23c8fdec4c8ddb771191d9b36a57fbce6741835a78f1b18ab6d15ae7d/ollama-0.5.4-py3-none-any.whl" + "hash": "534511b3ccea2dff419ae06c3b58d7f217c55be7897c8ce5868dfb6b219cf7a0", + "url": "https://files.pythonhosted.org/packages/b5/c1/edc9f41b425ca40b26b7c104c5f6841a4537bb2552bfa6ca66e81405bb95/ollama-0.6.0-py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "75857505a5d42e5e58114a1b78cc8c24596d8866863359d8a2329946a9b6d6f3", - "url": "https://files.pythonhosted.org/packages/72/62/a36be4555e4218d6c8b35e72e0dfe0823845400097275cd81c9aec4ddf39/ollama-0.5.4.tar.gz" + "hash": "da2b2d846b5944cfbcee1ca1e6ee0585f6c9d45a2fe9467cbcd096a37383da2f", + "url": "https://files.pythonhosted.org/packages/d6/47/f9ee32467fe92744474a8c72e138113f3b529fc266eea76abfdec9a33f3b/ollama-0.6.0.tar.gz" } ], "project_name": "ollama", @@ -4646,7 +4788,7 @@ "pydantic>=2.9" ], "requires_python": ">=3.8", - "version": "0.5.4" + "version": "0.6.0" }, { 
"artifacts": [ @@ -5970,6 +6112,27 @@ "requires_python": ">=3.9", "version": "0.3.2" }, + { + "artifacts": [ + { + "algorithm": "sha256", + "hash": "13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", + "url": "https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl" + }, + { + "algorithm": "sha256", + "hash": "21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", + "url": "https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz" + } + ], + "project_name": "proto-plus", + "requires_dists": [ + "google-api-core>=1.31.5; extra == \"testing\"", + "protobuf<7.0.0,>=3.19.0" + ], + "requires_python": ">=3.7", + "version": "1.26.1" + }, { "artifacts": [ { @@ -6342,6 +6505,26 @@ "requires_python": ">=3.8", "version": "0.6.1" }, + { + "artifacts": [ + { + "algorithm": "sha256", + "hash": "29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", + "url": "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl" + }, + { + "algorithm": "sha256", + "hash": "677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", + "url": "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz" + } + ], + "project_name": "pyasn1-modules", + "requires_dists": [ + "pyasn1<0.7.0,>=0.6.1" + ], + "requires_python": ">=3.8", + "version": "0.4.2" + }, { "artifacts": [ { @@ -6617,13 +6800,13 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", - "url": "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl" + "hash": 
"fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", + "url": "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee", - "url": "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz" + "hash": "d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", + "url": "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz" } ], "project_name": "pydantic-settings", @@ -6640,7 +6823,7 @@ "typing-inspection>=0.4.0" ], "requires_python": ">=3.9", - "version": "2.10.1" + "version": "2.11.0" }, { "artifacts": [ @@ -7789,13 +7972,13 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "2324aea8573a3fa1576df7fb4d65c4eb8d9929c8fa5939647397a07179eef8d0", - "url": "https://files.pythonhosted.org/packages/7a/84/bde4c4bbb269b71bc09316af8eb00da91f67814d40337cc12ef9c8742541/sentry_sdk-2.38.0-py2.py3-none-any.whl" + "hash": "ba655ca5e57b41569b18e2a5552cb3375209760a5d332cdd87c6c3f28f729602", + "url": "https://files.pythonhosted.org/packages/dd/44/4356cc64246ba7b2b920f7c97a85c3c52748e213e250b512ee8152eb559d/sentry_sdk-2.39.0-py2.py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "792d2af45e167e2f8a3347143f525b9b6bac6f058fb2014720b40b84ccbeb985", - "url": "https://files.pythonhosted.org/packages/b2/22/60fd703b34d94d216b2387e048ac82de3e86b63bc28869fb076f8bb0204a/sentry_sdk-2.38.0.tar.gz" + "hash": "8c185854d111f47f329ab6bc35993f28f7a6b7114db64aa426b326998cfa14e9", + "url": "https://files.pythonhosted.org/packages/4c/72/43294fa4bdd75c51610b5104a3ff834459ba653abb415150aa7826a249dd/sentry_sdk-2.39.0.tar.gz" } ], "project_name": "sentry-sdk", @@ -7851,7 +8034,7 @@ "urllib3>=1.26.11" ], "requires_python": 
">=3.6", - "version": "2.38.0" + "version": "2.39.0" }, { "artifacts": [ @@ -9821,6 +10004,7 @@ "pip_version": "24.2", "prefer_older_binary": false, "requirements": [ + "a2a-sdk>=0.3.0", "acp-sdk>=0.8.1", "aiosqlite", "ansicolors==1.1.8",