@@ -65,29 +65,29 @@ def __init__(
         self.client = Mistral(api_key=api_key, **kwargs)
 
     def get_messages(
-        self, input: str, chat_history: Optional[list[Any]] = None
+        self, input: str, message_history: Optional[list[Any]] = None
     ) -> list[Messages]:
         messages = []
         if self.system_instruction:
             messages.append(SystemMessage(content=self.system_instruction).model_dump())
-        if chat_history:
+        if message_history:
             try:
-                MessageList(messages=chat_history)
+                MessageList(messages=message_history)
             except ValidationError as e:
                 raise LLMGenerationError(e.errors()) from e
-            messages.extend(chat_history)
+            messages.extend(message_history)
         messages.append(UserMessage(content=input).model_dump())
         return messages
 
     def invoke(
-        self, input: str, chat_history: Optional[list[Any]] = None
+        self, input: str, message_history: Optional[list[Any]] = None
     ) -> LLMResponse:
         """Sends a text input to the Mistral chat completion model
         and returns the response's content.
 
         Args:
             input (str): Text sent to the LLM.
-            chat_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
+            message_history (Optional[list]): A collection of previous messages, with each message having a specific role assigned.
 
         Returns:
             LLMResponse: The response from MistralAI.
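For reference, a minimal sketch of the shape get_messages now expects for message_history, assuming the role/content dict format that MessageList validates (the conversation content here is invented):

# A hypothetical message_history value; assumes MessageList accepts
# role/content dicts with "user"/"assistant" roles.
message_history = [
    {"role": "user", "content": "What models does this wrapper support?"},
    {"role": "assistant", "content": "Any Mistral chat completion model."},
]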
@@ -96,7 +96,7 @@ def invoke(
             LLMGenerationError: If anything goes wrong.
         """
         try:
-            messages = self.get_messages(input, chat_history)
+            messages = self.get_messages(input, message_history)
             response = self.client.chat.complete(
                 model=self.model_name,
                 messages=messages,
@@ -112,14 +112,14 @@ def invoke(
             raise LLMGenerationError(e)
 
     async def ainvoke(
-        self, input: str, chat_history: Optional[list[Any]] = None
+        self, input: str, message_history: Optional[list[Any]] = None
     ) -> LLMResponse:
         """Asynchronously sends a text input to the MistralAI chat
         completion model and returns the response's content.
 
         Args:
             input (str): Text sent to the LLM.
-            chat_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
+            message_history (Optional[list]): A collection of previous messages, with each message having a specific role assigned.
 
         Returns:
             LLMResponse: The response from MistralAI.
@@ -128,7 +128,7 @@ async def ainvoke(
             LLMGenerationError: If anything goes wrong.
         """
         try:
-            messages = self.get_messages(input, chat_history)
+            messages = self.get_messages(input, message_history)
             response = await self.client.chat.complete_async(
                 model=self.model_name,
                 messages=messages,
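Taken together, a hedged usage sketch of the renamed parameter. The class name MistralAILLM, its import path, the MISTRAL_API_KEY environment variable, and the .content field on LLMResponse are assumptions drawn from context, not shown in this diff:

import asyncio

from neo4j_graphrag.llm import MistralAILLM  # assumed import path

# Assumes the wrapper reads MISTRAL_API_KEY from the environment when
# api_key is not passed explicitly; the model name is illustrative.
llm = MistralAILLM(model_name="mistral-small-latest")

history = [
    {"role": "user", "content": "What is a knowledge graph?"},
    {"role": "assistant", "content": "A graph of entities and their relationships."},
]

# Synchronous call: prior turns now travel through the renamed keyword.
res = llm.invoke("Give a one-line example.", message_history=history)
print(res.content)  # assumed LLMResponse field

# Asynchronous variant, backed by client.chat.complete_async.
res = asyncio.run(llm.ainvoke("And another?", message_history=history))
print(res.content)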