Skip to content

Commit 3c1a286

Browse files
committed
feat(NGUI-223): a2a protocol support
1 parent a7e4145 commit 3c1a286

File tree

12 files changed

+919
-25
lines changed

12 files changed

+919
-25
lines changed

libs/3rdparty/python/BUILD

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,11 @@ python_requirements(
3939
source="rich-requirements.txt",
4040
)
4141

# Third-party requirements for the A2A protocol integration.
python_requirements(
    name="a2a",
    source="a2a-requirements.txt",
)
46+
4247
python_requirements(
4348
name="mcp",
4449
source="mcp-requirements.txt",
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
a2a-sdk>=0.3.0

libs/next_gen_ui_a2a/BUILD

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
# This target sets the metadata for all the Python non-test files in this directory.
python_sources(
    name="lib",
    dependencies=[
        "libs/3rdparty/python:a2a",
    ],
)

# This target sets the metadata for all the Python test files in this directory.
python_tests(
    name="tests",
    dependencies=[
        "libs/3rdparty/python:a2a",
    ],
)

# This target allows us to build a `.whl` bdist and a `.tar.gz` sdist by auto-generating
# `setup.py`. See https://www.pantsbuild.org/docs/python-distributions.
#
# Because this target has no source code, Pants cannot infer dependencies. We depend on `:lib`,
# which means we'll include all the non-test Python files in this directory, and any of
# their dependencies.
python_distribution(
    name="dist",
    dependencies=[
        ":lib",
    ],
    provides=python_artifact(
        name="next_gen_ui_a2a",
        version=env("VERSION"),
        license="Apache-2.0",
        description="A2A integration for Next Gen UI Agent",
        long_description_content_type="text/markdown",
        classifiers=[
            "Programming Language :: Python :: 3",
            "Programming Language :: Python :: 3.12",
            "Programming Language :: Python :: 3.13",
            "License :: OSI Approved :: Apache Software License",
        ],
        url="https://github.com/RedHat-UX/next-gen-ui-agent",
    ),
    long_description_path="libs/next_gen_ui_a2a/README.md",
    generate_setup=True,
)

libs/next_gen_ui_a2a/Containerfile

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
FROM registry.access.redhat.com/ubi9/python-312

# Set work directory
WORKDIR /opt/app-root/

# Install runtime dependencies.
# Pin a2a-sdk consistently with libs/3rdparty/python/a2a-requirements.txt;
# the [http-server] extra already pulls in the base a2a-sdk package, so
# listing a2a-sdk separately is redundant.
RUN pip install "a2a-sdk[http-server]>=0.3.0" uvicorn langchain_openai

# Copy Python Project Files (Container context must be the `python` directory)
COPY . /opt/app-root

USER root

# Install next_gen_ui dependencies (wheels are provided in /opt/ngui-dist,
# e.g. via a build-time volume mount — see the README build instructions)
RUN pip install \
    /opt/ngui-dist/next_gen_ui_agent*.whl \
    /opt/ngui-dist/next_gen_ui_rhds*.whl

# Allow non-root user to access everything in app-root
RUN chgrp -R root /opt/app-root/ && chmod -R g+rwx /opt/app-root/

# Expose default port (change if needed)
EXPOSE 9999

USER 1001

# Run the agent (executes __main__.py of the copied package)
CMD python .

libs/next_gen_ui_a2a/README.md

Lines changed: 222 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,222 @@
1+
# Next Gen UI Agent A2A Protocol Integration
2+
3+
[A2A Protocol](https://a2a-protocol.org/) provides a standard for communicating with agents
4+
and provides interoperability via client SDKs in different languages.
5+
6+
This package provides, or helps you build:
7+
1. Standard A2A API to the Next Gen UI agent
8+
2. HTTP Server to run the A2A API and execute the agent
9+
3. Docker image
10+
11+
To interact with the agent via the A2A protocol, use any A2A client implementation.
12+
13+
## Installation
14+
15+
```sh
16+
pip install -U next_gen_ui_a2a
17+
```
18+
19+
## Example
20+
21+
### Run A2A server with Next Gen UI agent
22+
23+
```py
24+
import os

import uvicorn
25+
from a2a.server.apps import A2AStarletteApplication
26+
from a2a.server.request_handlers import DefaultRequestHandler
27+
from a2a.server.tasks import InMemoryTaskStore
28+
from langchain_openai import ChatOpenAI
29+
30+
from next_gen_ui_a2a.agent_card import card
31+
from next_gen_ui_a2a.agent_executor import NextGenUIAgentExecutor
32+
from next_gen_ui_agent.model import LangChainModelInference
33+
from next_gen_ui_agent.types import AgentConfig
34+
35+
if not os.environ.get("OPENAI_API_KEY"):
36+
os.environ["OPENAI_API_KEY"] = "ollama"
37+
model = os.getenv("INFERENCE_MODEL", "llama3.2")
38+
base_url = os.getenv("OPEN_API_URL", "http://localhost:11434/v1")
39+
40+
# Create Chat API used by next_gen_ui agent
41+
llm = ChatOpenAI(model=model, base_url=base_url)
42+
inference = LangChainModelInference(llm)
43+
config = AgentConfig(inference=inference)
44+
45+
request_handler = DefaultRequestHandler(
46+
agent_executor=NextGenUIAgentExecutor(config),
47+
task_store=InMemoryTaskStore(),
48+
)
49+
50+
server = A2AStarletteApplication(
51+
agent_card=card,
52+
http_handler=request_handler,
53+
)
54+
55+
uvicorn.run(server.build(), host="0.0.0.0", port=9999)
56+
```
57+
58+
### Run A2A client
59+
60+
```py
61+
import logging
62+
from uuid import uuid4
63+
64+
import httpx
65+
from a2a.client import A2ACardResolver, A2AClient
66+
from a2a.types import ( # SendStreamingMessageRequest,
67+
AgentCard,
68+
Message,
69+
MessageSendParams,
70+
Part,
71+
Role,
72+
SendMessageRequest,
73+
TextPart,
74+
)
75+
from a2a.utils.constants import AGENT_CARD_WELL_KNOWN_PATH
76+
77+
78+
async def main() -> None:
79+
# Configure logging to show INFO level messages
80+
logging.basicConfig(level=logging.INFO)
81+
logger = logging.getLogger(__name__) # Get a logger instance
82+
83+
base_url = "http://localhost:9999"
84+
85+
async with httpx.AsyncClient(timeout=120) as httpx_client:
86+
# Initialize A2ACardResolver
87+
resolver = A2ACardResolver(
88+
httpx_client=httpx_client,
89+
base_url=base_url,
90+
# agent_card_path uses default, extended_agent_card_path also uses default
91+
)
92+
93+
# Fetch Public Agent Card and Initialize Client
94+
final_agent_card_to_use: AgentCard | None = None
95+
96+
try:
97+
logger.info(
98+
f"Attempting to fetch public agent card from: {base_url}{AGENT_CARD_WELL_KNOWN_PATH}"
99+
)
100+
_public_card = (
101+
await resolver.get_agent_card()
102+
) # Fetches from default public path
103+
logger.info("Successfully fetched public agent card:")
104+
logger.info(_public_card.model_dump_json(indent=2, exclude_none=True))
105+
final_agent_card_to_use = _public_card
106+
logger.info(
107+
"\nUsing PUBLIC agent card for client initialization (default)."
108+
)
109+
110+
except Exception as e:
111+
logger.exception("Critical error fetching public agent card")
112+
raise RuntimeError(
113+
"Failed to fetch the public agent card. Cannot continue."
114+
) from e
115+
116+
client = A2AClient(
117+
httpx_client=httpx_client,
118+
agent_card=final_agent_card_to_use,
119+
)
120+
logger.info("A2AClient initialized.")
121+
122+
movies_data = {
123+
"movie": {
124+
"languages": ["English"],
125+
"year": 1995,
126+
"imdbId": "0114709",
127+
"runtime": 81,
128+
"imdbRating": 8.3,
129+
"movieId": "1",
130+
"countries": ["USA"],
131+
"imdbVotes": 591836,
132+
"title": "Toy Story",
133+
"url": "https://themoviedb.org/movie/862",
134+
"revenue": 373554033,
135+
"tmdbId": "862",
136+
"plot": "A cowboy doll is profoundly threatened and jealous when a new spaceman figure supplants him as top toy in a boy's room.",
137+
"posterUrl": "https://image.tmdb.org/t/p/w440_and_h660_face/uXDfjJbdP4ijW5hWSBrPrlKpxab.jpg",
138+
"released": "2022-11-02",
139+
"trailerUrl": "https://www.youtube.com/watch?v=v-PjgYDrg70",
140+
"budget": 30000000,
141+
},
142+
"actors": ["Jim Varney", "Tim Allen", "Tom Hanks", "Don Rickles"],
143+
}
144+
145+
message = Message(
146+
role=Role.user,
147+
parts=[
148+
Part(
149+
root=TextPart(
150+
text="Tell me details about Toy Story",
151+
metadata={
152+
"data": movies_data,
153+
"type": "search_movie",
154+
},
155+
)
156+
),
157+
# Part(root=DataPart(data=movies_data)),
158+
],
159+
message_id=str(uuid4()),
160+
)
161+
request = SendMessageRequest(
162+
id=str(uuid4()), params=MessageSendParams(message=message)
163+
)
164+
165+
response = await client.send_message(request)
166+
logger.info("Execution finished.")
167+
print(response.model_dump(mode="json", exclude_none=True))
168+
169+
# streaming_request = SendStreamingMessageRequest(
170+
# id=str(uuid4()), params=MessageSendParams(message=message)
171+
# )
172+
# stream_response = client.send_message_streaming(streaming_request)
173+
# async for chunk in stream_response:
174+
# print(chunk.model_dump(mode="json", exclude_none=True))
175+
176+
177+
if __name__ == "__main__":
178+
import asyncio
179+
180+
asyncio.run(main())
181+
```
182+
183+
184+
## Build Container Image
185+
186+
Agent can also be built using a container file.
187+
188+
1. Build project
189+
190+
```sh
191+
pants package ::
192+
```
193+
194+
2. Navigate to the `libs/next_gen_ui_a2a` directory:
195+
196+
```sh
197+
cd libs/next_gen_ui_a2a
198+
```
199+
200+
3. Build the container file
201+
202+
```sh
203+
export PROJ_DIST_DIR=$(realpath ../../dist)
204+
podman build . -v $PROJ_DIST_DIR:/opt/ngui-dist:ro,z -t ngui-a2a-server
205+
```
206+
207+
> [!Tip]
208+
> `docker` is a drop-in replacement for `podman` and can also be used in these commands.
209+
210+
4. Run your container
211+
212+
```bash
213+
podman run --rm -p 9999:9999 \
214+
-e INFERENCE_MODEL=llama3.2 \
215+
-e OPEN_API_URL=http://host.containers.internal:11434/v1 \
216+
ngui-a2a-server
217+
```
218+
5. Validate the A2A server is up:
219+
220+
```sh
221+
curl http://localhost:9999/.well-known/agent-card.json
222+
```

libs/next_gen_ui_a2a/__init__.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
"""A2A protocol integration for the Next Gen UI Agent.

Re-exports the agent executor as the package's public API.
"""

from next_gen_ui_a2a.agent_executor import NextGenUIAgentExecutor

__all__ = [
    "NextGenUIAgentExecutor",
]

libs/next_gen_ui_a2a/__main__.py

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
import logging
2+
import os
3+
4+
import uvicorn # pants: no-infer-dep
5+
from a2a.server.apps import A2AStarletteApplication # pants: no-infer-dep
6+
from a2a.server.request_handlers import DefaultRequestHandler # pants: no-infer-dep
7+
from a2a.server.tasks import InMemoryTaskStore # pants: no-infer-dep
8+
from agent_card import card # type: ignore[import-not-found]
9+
from agent_executor import NextGenUIAgentExecutor # type: ignore[import-not-found]
10+
from langchain_openai import ChatOpenAI # pants: no-infer-dep
11+
from next_gen_ui_agent.model import LangChainModelInference
12+
from next_gen_ui_agent.types import AgentConfig
13+
14+
if __name__ == "__main__":
15+
logging.basicConfig(level=logging.INFO)
16+
logger = logging.getLogger(__name__)
17+
18+
if not os.environ.get("OPENAI_API_KEY"):
19+
os.environ["OPENAI_API_KEY"] = "ollama"
20+
model = os.getenv("INFERENCE_MODEL", "llama3.2")
21+
base_url = os.getenv("OPEN_API_URL", "http://localhost:11434/v1")
22+
23+
logger.info(
24+
"Starting Next Gen UI A2A Server. base_url=%s, model=%s", base_url, model
25+
)
26+
27+
llm = ChatOpenAI(model=model, base_url=base_url)
28+
inference = LangChainModelInference(llm)
29+
config = AgentConfig(inference=inference)
30+
31+
request_handler = DefaultRequestHandler(
32+
agent_executor=NextGenUIAgentExecutor(config),
33+
task_store=InMemoryTaskStore(),
34+
)
35+
36+
server = A2AStarletteApplication(
37+
agent_card=card,
38+
http_handler=request_handler,
39+
)
40+
41+
uvicorn.run(server.build(), host="0.0.0.0", port=9999)

libs/next_gen_ui_a2a/agent_card.py

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
"""Public A2A agent card and skill definition for the Next Gen UI agent."""

from a2a.types import AgentCapabilities, AgentCard, AgentSkill

# Single advertised skill: turn structured backend data + a user prompt
# into a generated UI component.
skill = AgentSkill(
    id="generate_ui_components",
    name="Generates UI component",
    description="Returns generated UI component",
    tags=["ui"],
    examples=[
        "First message TextPart should be user prompt. backend data could be passed as 'data' field in metadata or following DataParts"
    ],
)

# This will be the public-facing agent card
card = AgentCard(
    name="Next Gen UI Agent",
    description="Generates UI component based on structured input data and user prompt",
    url="http://localhost:9999/",
    version="1.0.0",
    default_input_modes=["text"],
    default_output_modes=["text"],
    capabilities=AgentCapabilities(streaming=True),
    skills=[skill],
    supports_authenticated_extended_card=False,
)

0 commit comments

Comments
 (0)