Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Memosapi0305 #41

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions cerebrum/example/run_agent.sh
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
# run local agent
# NOTE: flags renamed in this PR (--path -> --agent_path, --task -> --task_input)
python cerebrum/run_agent.py \
    --mode local \
    --agent_path cerebrum/example/agents/test_agent \
    --task_input "What is the capital of United States?"

# run remote agent
python cerebrum/run_agent.py \
Expand Down
86 changes: 86 additions & 0 deletions cerebrum/llm/apis.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,34 @@ class LLMQuery(Query):
- "tool_use": Using external tools
- "operate_file": File operations
message_return_type: Desired format of the response
response_format: Specific json format of the response, e.g.,

{"type": "json_schema", "json_schema": {"name": "response",
"schema": {
"type": "object",
"properties": {
"keywords": {
"type": "array",
"items": {
"type": "string"
}
},
"context": {
"type": "string",
},
"tags": {
"type": "array",
"items": {
"type": "string"
}
},
},
"required": ["keywords", "context", "tags"],
"additionalProperties": False
},
"strict": True
}


Examples:
```python
Expand Down Expand Up @@ -113,6 +141,7 @@ class LLMQuery(Query):
tools: Optional[List[Dict[str, Any]]] = Field(default_factory=list)
action_type: Literal["chat", "tool_use", "operate_file"] = Field(default="chat")
message_return_type: str = Field(default="text")
response_format: Dict[str, Dict] = None

class Config:
arbitrary_types_allowed = True
Expand Down Expand Up @@ -226,6 +255,63 @@ def llm_chat(
)
return send_request(agent_name, query, base_url)

def llm_chat_with_json_output(
    agent_name: str,
    messages: List[Dict[str, Any]],
    base_url: str = aios_kernel_url,
    llms: Optional[List[Dict[str, Any]]] = None,
    response_format: Optional[Dict[str, Any]] = None
) -> LLMResponse:
    """
    Perform a chat interaction with the LLM that returns structured JSON.

    Identical to ``llm_chat`` except that the response is requested in JSON
    form (``message_return_type="json"``) and an optional JSON-schema
    ``response_format`` constrains the output structure.

    Args:
        agent_name: Name of the agent making the request
        messages: List of message dictionaries with format:
            [
                {
                    "role": "system"|"user"|"assistant",
                    "content": str,
                    "name": str  # Optional
                }
            ]
        base_url: API base URL
        llms: Optional list of LLM configurations
        response_format: Optional JSON-schema specification of the desired
            output, e.g. {"type": "json_schema", "json_schema": {...}}
            (see the LLMQuery docstring for a full example)

    Returns:
        LLMResponse containing the generated JSON response

    Examples:
        ```python
        response = llm_chat_with_json_output(
            "agent1",
            messages=[
                {
                    "role": "system",
                    "content": "You are a helpful assistant."
                },
                {
                    "role": "user",
                    "content": "Extract keywords from this text."
                }
            ],
            llms=[{
                "name": "gpt-4",
                "temperature": 0.7
            }],
            response_format={"type": "json_schema", "json_schema": {...}}
        )
        ```
    """
    # Same request shape as llm_chat, but forces a JSON-typed reply and
    # forwards the schema constraint to the kernel.
    query = LLMQuery(
        llms=llms,
        messages=messages,
        tools=None,
        message_return_type="json",
        action_type="chat",
        response_format=response_format
    )
    return send_request(agent_name, query, base_url)

def llm_call_tool(
agent_name: str,
messages: List[Dict[str, Any]],
Expand Down
6 changes: 3 additions & 3 deletions cerebrum/memory/apis.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,14 +163,14 @@ def delete_memory(agent_name: str,

def search_memories(agent_name: str,
                    query: str,
                    k: int = 5,
                    base_url: str = aios_kernel_url) -> MemoryResponse:
    """Search for memories using a hybrid retrieval approach.

    Args:
        agent_name: Name of the agent to handle the request
        query: Search query text
        k: Maximum number of results to return
        base_url: Base URL for the API server

    Returns:
        MemoryResponse containing the search results
    """
    # Use a distinct local name so the incoming `query` text is not
    # shadowed by the request object that wraps it.
    memory_query = MemoryQuery(
        action_type="search",
        params={"query": query, "k": k}
    )
    return send_request(agent_name, memory_query, base_url)