@@ -31,8 +31,8 @@
     from mistralai import Mistral, Messages
     from mistralai.models.sdkerror import SDKError
 except ImportError:
-    Mistral = None  # type: ignore
-    SDKError = None  # type: ignore
+    Mistral = None
+    SDKError = None


 class MistralAILLM(LLMInterface):
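A note on the `except ImportError` fallback above: setting `Mistral = None` only helps if something later checks for it. A minimal sketch of the guard this pattern typically pairs with (the helper name and error message here are assumptions, not part of this diff):

from mistralai import Mistral  # falls back to None if the package is absent

try:
    from mistralai import Mistral
except ImportError:
    Mistral = None

def _require_mistral() -> None:
    # Fail loudly when the object is constructed rather than with an
    # AttributeError on the first use of self.client.
    if Mistral is None:
        raise ImportError(
            "Could not import the mistralai package. "
            "Install it with `pip install mistralai`."
        )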
@@ -64,7 +64,7 @@ def __init__(
         api_key = os.getenv("MISTRAL_API_KEY", "")
         self.client = Mistral(api_key=api_key, **kwargs)

-    def get_messages(self, input: str, chat_history: list) -> list[Messages]:
+    def get_messages(self, input: str, chat_history: Optional[list[Any]] = None) -> list[Messages]:
         messages = []
         if self.system_instruction:
             messages.append(SystemMessage(content=self.system_instruction).model_dump())
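The signature change above is more than a typing tweak: the old `chat_history: list` had no default, so every caller had to pass something, and a bare `list` says nothing about element types. A standalone sketch of the pattern, with plain dicts standing in for the message models imported at the top of the file:

from typing import Any, Optional

def get_messages_sketch(
    input: str,
    system_instruction: Optional[str] = None,
    chat_history: Optional[list[Any]] = None,
) -> list[dict[str, Any]]:
    # A None default instead of a bare `list` annotation: callers may omit
    # the argument, and we avoid the mutable-default (`= []`) trap.
    messages: list[dict[str, Any]] = []
    if system_instruction:
        messages.append({"role": "system", "content": system_instruction})
    if chat_history:  # treats None and [] the same way
        messages.extend(chat_history)
    messages.append({"role": "user", "content": input})
    return messages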
@@ -78,7 +78,7 @@ def get_messages(self, input: str, chat_history: list) -> list[Messages]:
         return messages

     def invoke(
-        self, input: str, chat_history: Optional[list[dict[str, str]]] = None
+        self, input: str, chat_history: Optional[list[Any]] = None
     ) -> LLMResponse:
         """Sends a text input to the Mistral chat completion model
         and returns the response's content.
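A hypothetical call site for the widened annotation. With `Optional[list[Any]]`, history entries are no longer restricted to `dict[str, str]`; the constructor argument and message shapes below are assumptions for illustration:

llm = MistralAILLM(model_name="mistral-small-latest")  # constructor args assumed

history = [
    {"role": "user", "content": "Hello"},
    {"role": "assistant", "content": "Hi! How can I help?"},
]
# Under the old dict[str, str] annotation, richer dicts or message objects
# in the history would fail type checking; list[Any] accepts both.
response = llm.invoke("What did I just say?", chat_history=history)
print(response.content)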
@@ -110,7 +110,7 @@ def invoke(
             raise LLMGenerationError(e)

     async def ainvoke(
-        self, input: str, chat_history: Optional[list[dict[str, str]]] = None
+        self, input: str, chat_history: Optional[list[Any]] = None
     ) -> LLMResponse:
         """Asynchronously sends a text input to the MistralAI chat
         completion model and returns the response's content.
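The async twin gets the same treatment. A usage sketch, assuming `ainvoke` mirrors `invoke` apart from awaiting the client call:

import asyncio

async def main() -> None:
    llm = MistralAILLM(model_name="mistral-small-latest")  # args assumed
    # chat_history can now be omitted entirely thanks to the None default.
    response = await llm.ainvoke("Tell me a one-line joke.")
    print(response.content)

asyncio.run(main())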