
Commit d0079cb

fix: Dont stream llm calls which are not saved in state (#418)
* fix: Dont stream llm calls which are not saved in state
* format n lint
1 parent 6119e1e commit d0079cb
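
The research-plan and query-generation LLM calls only populate graph state and are never shown to the user, so this commit tags their ainvoke calls with "langsmith:nostream" to keep those tokens out of the streamed output. A minimal sketch of the pattern, assuming a LangChain chat model with structured output; the model choice and function body below are illustrative, not a copy of the repository code:

from typing import TypedDict, cast

from langchain.chat_models import init_chat_model


class Plan(TypedDict):
    steps: list[str]


async def create_research_plan(messages: list[dict]) -> Plan:
    # Illustrative model setup; the repository loads its models from configuration.
    model = init_chat_model(
        "gpt-4o-mini", model_provider="openai"
    ).with_structured_output(Plan)
    # The "langsmith:nostream" tag marks this as an internal call: its output
    # is written to graph state only and should not be streamed token by token.
    return cast(
        Plan, await model.ainvoke(messages, {"tags": ["langsmith:nostream"]})
    )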

File tree

3 files changed: +7 -2 lines changed


.gitignore

+1
@@ -155,3 +155,4 @@ node_modules/
 notebooks
 
 .langgraph-data/
+.langgraph_api/

backend/retrieval_graph/graph.py

+3 -1
@@ -144,7 +144,9 @@ class Plan(TypedDict):
     messages = [
         {"role": "system", "content": configuration.research_plan_system_prompt}
     ] + state.messages
-    response = cast(Plan, await model.ainvoke(messages))
+    response = cast(
+        Plan, await model.ainvoke(messages, {"tags": ["langsmith:nostream"]})
+    )
     return {
         "steps": response["steps"],
         "documents": "delete",

backend/retrieval_graph/researcher_graph/graph.py

+3 -1
@@ -42,7 +42,9 @@ class Response(TypedDict):
         {"role": "system", "content": configuration.generate_queries_system_prompt},
         {"role": "human", "content": state.question},
     ]
-    response = cast(Response, await model.ainvoke(messages))
+    response = cast(
+        Response, await model.ainvoke(messages, {"tags": ["langsmith:nostream"]})
+    )
     return {"queries": response["queries"]}
 
 
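
For context: when the graph is streamed with stream_mode="messages", LangGraph emits (message chunk, metadata) pairs for LLM calls made inside nodes, and runs tagged "langsmith:nostream" are excluded from that stream, which is the behavior this commit relies on. A consumer-side sketch of the effect, assuming the compiled graph is exported as graph from backend/retrieval_graph/graph.py (as in this repository) and a recent langgraph version:

import asyncio

from backend.retrieval_graph.graph import graph


async def main() -> None:
    question = {"messages": [("human", "How do I use LangGraph checkpoints?")]}
    async for chunk, metadata in graph.astream(question, stream_mode="messages"):
        # After this commit, the planner and query-generation calls carry the
        # "langsmith:nostream" tag, so their tokens are not forwarded here;
        # only the user-facing answer node still streams token by token.
        print(chunk.content, end="", flush=True)


asyncio.run(main())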

0 commit comments
