Skip to content
Open
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ dependencies = [
"langchain>=0.3.7",
"openai>=1.58.1",
"pydantic>=2.9.2",
"og-test-v2-x402==0.0.11"
"og-test-v2-x402==0.0.12.dev3"
]

[project.scripts]
Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,4 @@ requests>=2.32.3
langchain>=0.3.7
openai>=1.58.1
pydantic>=2.9.2
og-test-v2-x402==0.0.11
og-test-v2-x402==0.0.12.dev3
18 changes: 15 additions & 3 deletions src/opengradient/agents/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,25 +6,37 @@
into existing applications and agent frameworks.
"""

from ..client.llm import LLM
from ..types import TEE_LLM, x402SettlementMode
from .og_langchain import *


def langchain_adapter(
    private_key: str | None = None,
    model_cid: TEE_LLM | str | None = None,
    model: TEE_LLM | str | None = None,
    max_tokens: int = 300,
    temperature: float = 0.0,
    x402_settlement_mode: x402SettlementMode = x402SettlementMode.BATCH_HASHED,
    client: LLM | None = None,
    rpc_url: str | None = None,
    tee_registry_address: str | None = None,
    llm_server_url: str | None = None,
) -> OpenGradientChatModel:
    """Build an OpenGradient chat model usable as a LangChain LLM.

    Wraps ``OpenGradientChatModel`` so it can be dropped into LangChain
    agents. ``model`` acts as an alias for ``model_cid``; when both are
    given, ``model_cid`` takes precedence (``model`` is only used if
    ``model_cid`` is falsy). All remaining arguments are forwarded
    unchanged to the ``OpenGradientChatModel`` constructor.
    """
    # Prefer the explicit model_cid; fall back to the `model` alias.
    resolved_model = model_cid if model_cid else model
    chat_model = OpenGradientChatModel(
        private_key=private_key,
        client=client,
        model_cid=resolved_model,
        max_tokens=max_tokens,
        temperature=temperature,
        x402_settlement_mode=x402_settlement_mode,
        rpc_url=rpc_url,
        tee_registry_address=tee_registry_address,
        llm_server_url=llm_server_url,
    )
    return chat_model


Expand Down
Loading
Loading