
Commit 4c62827

Formatting
1 parent 6aea7fa commit 4c62827

4 files changed: +32 -8 lines changed


src/neo4j_graphrag/generation/graphrag.py

+3 -1

@@ -154,7 +154,9 @@ def build_query(
             chat_history=chat_history
         )
         summary = self.llm.invoke(
-            input=summarization_prompt, system_instruction=summarization_prompt.SYSTEM_MESSAGE).content
+            input=summarization_prompt,
+            system_instruction=summarization_prompt.SYSTEM_MESSAGE,
+        ).content
         return ConversationTemplate().format(
             summary=summary, current_query=query_text
         )
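
For context, this reformatted call is where the per-call `system_instruction` keyword gets exercised. A minimal sketch of that calling pattern, with hypothetical stand-ins so it runs without the library or an API key; only the `invoke(...)` call shape and the `.content` access come from this commit:

```python
from dataclasses import dataclass

# Hypothetical stand-ins for LLMResponse and an LLM client.
@dataclass
class LLMResponse:
    content: str

class StubLLM:
    def invoke(self, input, chat_history=None, system_instruction=None):
        # A real client would call a model here; this just echoes.
        return LLMResponse(content=f"[{system_instruction}] {input}")

llm = StubLLM()
summary = llm.invoke(
    input="Summarize the conversation so far.",
    system_instruction="You are a concise summarizer.",  # per-call override
).content
print(summary)
```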

src/neo4j_graphrag/llm/base.py

+4 -1

@@ -43,7 +43,10 @@ def __init__(

     @abstractmethod
     def invoke(
-        self, input: str, chat_history: Optional[list[dict[str, str]]] = None, system_instruction: Optional[str] = None
+        self,
+        input: str,
+        chat_history: Optional[list[dict[str, str]]] = None,
+        system_instruction: Optional[str] = None,
     ) -> LLMResponse:
         """Sends a text input to the LLM and retrieves a response.
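
A toy subclass conforming to the reformatted abstract signature may help; the class name and echo behavior below are hypothetical, only the parameter list is taken from the diff:

```python
from typing import Optional

class EchoLLM:  # stands in for a concrete subclass of the abstract base
    def __init__(self, system_instruction: Optional[str] = None) -> None:
        self.system_instruction = system_instruction

    def invoke(
        self,
        input: str,
        chat_history: Optional[list[dict[str, str]]] = None,
        system_instruction: Optional[str] = None,
    ) -> str:  # the real interface returns LLMResponse
        # Per-call instruction takes precedence over the instance default,
        # mirroring the concrete clients touched by this commit.
        effective = (
            system_instruction
            if system_instruction is not None
            else self.system_instruction
        )
        return f"{effective}: {input}" if effective else input

print(EchoLLM("be brief").invoke("hello"))  # -> "be brief: hello"
print(EchoLLM("be brief").invoke("hello", system_instruction="be kind"))
```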

src/neo4j_graphrag/llm/openai_llm.py

+13 -3

@@ -61,10 +61,17 @@ def __init__(
         super().__init__(model_name, model_params, system_instruction)

     def get_messages(
-        self, input: str, chat_history: Optional[list[Any]] = None, system_instruction: Optional[str] = None
+        self,
+        input: str,
+        chat_history: Optional[list[Any]] = None,
+        system_instruction: Optional[str] = None,
     ) -> Iterable[ChatCompletionMessageParam]:
         messages = []
-        system_message = system_instruction if system_instruction is not None else self.system_instruction
+        system_message = (
+            system_instruction
+            if system_instruction is not None
+            else self.system_instruction
+        )
         if system_message:
             messages.append(SystemMessage(content=system_message).model_dump())
         if chat_history:

@@ -77,7 +84,10 @@ def get_messages(
         return messages

     def invoke(
-        self, input: str, chat_history: Optional[list[Any]] = None, system_instruction: Optional[str] = None
+        self,
+        input: str,
+        chat_history: Optional[list[Any]] = None,
+        system_instruction: Optional[str] = None,
     ) -> LLMResponse:
         """Sends a text input to the OpenAI chat completion model
         and returns the response's content.
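
The multi-line conditional keeps the existing fallback semantics: a per-call `system_instruction` wins, otherwise the instance default applies, and an explicit empty string still suppresses the default because the test is `is not None` rather than truthiness. A standalone sketch of that rule (the helper name is hypothetical, not part of the library):

```python
from typing import Optional

def resolve_system_message(
    per_call: Optional[str], default: Optional[str]
) -> Optional[str]:
    # Same precedence as get_messages: only None falls back to the default.
    return per_call if per_call is not None else default

assert resolve_system_message("be brief", "be verbose") == "be brief"
assert resolve_system_message(None, "be verbose") == "be verbose"
assert resolve_system_message("", "be verbose") == ""  # empty string wins
```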

src/neo4j_graphrag/llm/vertexai_llm.py

+12 -3

@@ -104,7 +104,10 @@ def get_messages(
         return messages

     def invoke(
-        self, input: str, chat_history: Optional[list[Any]] = None, system_instruction: Optional[str] = None
+        self,
+        input: str,
+        chat_history: Optional[list[Any]] = None,
+        system_instruction: Optional[str] = None,
     ) -> LLMResponse:
         """Sends text to the LLM and returns a response.

@@ -116,9 +119,15 @@ def invoke(
         Returns:
             LLMResponse: The response from the LLM.
         """
-        system_message = system_instruction if system_instruction is not None else self.system_instruction
+        system_message = (
+            system_instruction
+            if system_instruction is not None
+            else self.system_instruction
+        )
         self.model = GenerativeModel(
-            model_name=self.model_name, system_instruction=[system_message], **self.model_params
+            model_name=self.model_name,
+            system_instruction=[system_message],
+            **self.model_params,
         )
         try:
             messages = self.get_messages(input, chat_history)
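
Worth noting: unlike the OpenAI client, where the system instruction travels as a chat message, the Vertex AI SDK takes it as a `GenerativeModel` constructor argument, which is why `invoke` rebuilds the model on each call. A minimal sketch of that SDK pattern, assuming `vertexai.init()` has already been configured for a project; the model name and prompt are placeholders:

```python
from vertexai.generative_models import GenerativeModel

# The system instruction is bound at construction time, not per message.
model = GenerativeModel(
    model_name="gemini-1.5-flash",
    system_instruction=["You are a terse assistant."],
)
response = model.generate_content("What is a knowledge graph?")
print(response.text)
```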

0 commit comments