@@ -63,6 +63,8 @@ class LLMOptions:
     temperature: float | None
     parallel_tool_calls: bool | None
     tool_choice: Union[ToolChoice, Literal["auto", "required", "none"]] = "auto"
+    store: bool | None = None
+    metadata: dict[str, str] | None = None
 
 
 class LLM(llm.LLM):
@@ -77,6 +79,8 @@ def __init__(
         temperature: float | None = None,
         parallel_tool_calls: bool | None = None,
         tool_choice: Union[ToolChoice, Literal["auto", "required", "none"]] = "auto",
+        store: bool | None = None,
+        metadata: dict[str, str] | None = None,
     ) -> None:
         """
         Create a new instance of OpenAI LLM.
@@ -93,6 +97,8 @@ def __init__(
             temperature=temperature,
             parallel_tool_calls=parallel_tool_calls,
             tool_choice=tool_choice,
+            store=store,
+            metadata=metadata,
         )
         self._client = client or openai.AsyncClient(
             api_key=api_key,
@@ -738,6 +744,8 @@ async def _run(self) -> None:
                 stream_options={"include_usage": True},
                 stream=True,
                 user=user,
+                store=self._llm._opts.store,
+                metadata=self._llm._opts.metadata,
                 **opts,
             )
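
A minimal usage sketch of the new options, assuming the livekit.plugins.openai import path exposes this LLM class and using hypothetical model/metadata values:

# Sketch only: import path, model name, and metadata values are illustrative assumptions.
from livekit.plugins import openai

llm = openai.LLM(
    model="gpt-4o",
    store=True,  # new option added in this commit; forwarded to chat.completions.create
    metadata={"app": "my-agent", "env": "dev"},  # also forwarded per-request
)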