Skip to content

Commit 5d83c2b

Browse files
committed
Update GraphRAG to restore full LangChain compatibility
1 parent 98167ff commit 5d83c2b

File tree

3 files changed

+15
-6
lines changed

3 files changed

+15
-6
lines changed

examples/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,7 @@ are listed in [the last section of this file](#customize).
142142

143143
### Answer: GraphRAG
144144

145-
- [LangChain compatibility](./customize/answer/langchain_compatiblity.py)
145+
- [LangChain compatibility](customize/answer/langchain_compatibility.py)
146146
- [Use a custom prompt](./customize/answer/custom_prompt.py)
147147

148148

File renamed without changes.

src/neo4j_graphrag/generation/graphrag.py

Lines changed: 14 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@
2727
from neo4j_graphrag.generation.prompts import RagTemplate
2828
from neo4j_graphrag.generation.types import RagInitModel, RagResultModel, RagSearchModel
2929
from neo4j_graphrag.llm import LLMInterface
30+
from neo4j_graphrag.llm.utils import legacy_inputs_to_messages
3031
from neo4j_graphrag.message_history import MessageHistory
3132
from neo4j_graphrag.retrievers.base import Retriever
3233
from neo4j_graphrag.types import LLMMessage, RetrieverResult
@@ -145,12 +146,17 @@ def search(
145146
prompt = self.prompt_template.format(
146147
query_text=query_text, context=context, examples=validated_data.examples
147148
)
149+
150+
messages = legacy_inputs_to_messages(
151+
prompt,
152+
message_history=message_history,
153+
system_instruction=self.prompt_template.system_instructions,
154+
)
155+
148156
logger.debug(f"RAG: retriever_result={prettify(retriever_result)}")
149157
logger.debug(f"RAG: prompt={prompt}")
150158
llm_response = self.llm.invoke(
151-
prompt,
152-
message_history,
153-
system_instruction=self.prompt_template.system_instructions,
159+
messages,
154160
)
155161
answer = llm_response.content
156162
result: dict[str, Any] = {"answer": answer}
@@ -168,9 +174,12 @@ def _build_query(
168174
summarization_prompt = self._chat_summary_prompt(
169175
message_history=message_history
170176
)
171-
summary = self.llm.invoke(
172-
input=summarization_prompt,
177+
messages = legacy_inputs_to_messages(
178+
summarization_prompt,
173179
system_instruction=summary_system_message,
180+
)
181+
summary = self.llm.invoke(
182+
messages,
174183
).content
175184
return self.conversation_prompt(summary=summary, current_query=query_text)
176185
return query_text

0 commit comments

Comments (0)