From b2f38f576cf5b2aa14d6503e959ae156e864a52c Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 17:19:58 +0000 Subject: [PATCH 01/15] Add node_modules to .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 91ac0c7..3ef703f 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,4 @@ cookbooks/python/openai/data/hotel_invoices/transformed_invoice_json/* cookbooks/python/openai/data/hotel_invoices/extracted_invoice_json/* cookbooks/python/openai/data/hotel_invoices/hotel_DB.db cookbooks/python/openai/hallucination_results.csv +node_modules \ No newline at end of file From 205c2a6ddda0efc5c2ec9c81c6b19d7c8c2f008b Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 17:25:34 +0000 Subject: [PATCH 02/15] Update to Mistral client-python v 1.0.1 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 72e2339..1e8a232 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ azure-ai-inference==1.0.0b3 openai==1.37.1 -mistralai==0.4.2 +mistralai==1.0.1 python-dotenv==1.0.1 \ No newline at end of file From 70d316fbc54401f09f8ed263cdc6bd6784396536 Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 17:35:43 +0000 Subject: [PATCH 03/15] update notebook --- cookbooks/python/mistralai/evaluation.ipynb | 23 +++++++++------------ 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/cookbooks/python/mistralai/evaluation.ipynb b/cookbooks/python/mistralai/evaluation.ipynb index 5ba2d81..71f08db 100644 --- a/cookbooks/python/mistralai/evaluation.ipynb +++ b/cookbooks/python/mistralai/evaluation.ipynb @@ -96,14 +96,13 @@ "metadata": {}, "outputs": [], "source": [ - "from mistralai.client import MistralClient\n", - "from mistralai.models.chat_completion import ChatMessage\n", + "from mistralai import Mistral\n", "\n", "\n", "def run_mistral(user_message, 
model=\"mistral-small\"):\n", - " client = MistralClient(api_key=github_token, endpoint=endpoint)\n", - " messages = [ChatMessage(role=\"user\", content=user_message)]\n", - " chat_response = client.chat(\n", + " client = Mistral(api_key=github_token, server_url=endpoint)\n", + " messages = [{\"role\":\"user\", \"content\":user_message}]\n", + " chat_response = client.chat.complete(\n", " model=model,\n", " messages=messages,\n", " response_format={\"type\": \"json_object\"},\n", @@ -221,13 +220,12 @@ "outputs": [], "source": [ "import os\n", - "from mistralai.client import MistralClient\n", - "from mistralai.models.chat_completion import ChatMessage\n", + "from mistralai import Mistral\n", "\n", "\n", "def run_mistral(user_message, model=\"mistral-small\"):\n", - " client = MistralClient(api_key=github_token, endpoint=endpoint)\n", - " messages = [ChatMessage(role=\"user\", content=user_message)]\n", + " client = Mistral(api_key=github_token, server_url=endpoint)\n", + " messages = [{\"role\":\"user\", \"content\":user_message}]\n", " chat_response = client.chat(model=model, messages=messages)\n", " return chat_response.choices[0].message.content\n", "\n", @@ -375,13 +373,12 @@ "outputs": [], "source": [ "import os\n", - "from mistralai.client import MistralClient\n", - "from mistralai.models.chat_completion import ChatMessage\n", + "from mistralai import Mistral\n", "\n", "\n", "def run_mistral(user_message, model=\"mistral-small\", is_json=False):\n", - " client = MistralClient(api_key=github_token, endpoint=endpoint)\n", - " messages = [ChatMessage(role=\"user\", content=user_message)]\n", + " client = Mistral(api_key=github_token, server_url=endpoint)\n", + " messages = [{\"role\":\"user\", \"content\":user_message}]\n", "\n", " if is_json:\n", " chat_response = client.chat(\n", From 382cef95e754b1965ff686e39a90ff0dd775a1e2 Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 17:39:30 +0000 Subject: [PATCH 04/15] update evaluation.ipynb 
--- cookbooks/python/mistralai/evaluation.ipynb | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cookbooks/python/mistralai/evaluation.ipynb b/cookbooks/python/mistralai/evaluation.ipynb index 71f08db..3b81708 100644 --- a/cookbooks/python/mistralai/evaluation.ipynb +++ b/cookbooks/python/mistralai/evaluation.ipynb @@ -226,7 +226,7 @@ "def run_mistral(user_message, model=\"mistral-small\"):\n", " client = Mistral(api_key=github_token, server_url=endpoint)\n", " messages = [{\"role\":\"user\", \"content\":user_message}]\n", - " chat_response = client.chat(model=model, messages=messages)\n", + " chat_response = client.chat.complete(model=model, messages=messages)\n", " return chat_response.choices[0].message.content\n", "\n", "\n", @@ -381,11 +381,11 @@ " messages = [{\"role\":\"user\", \"content\":user_message}]\n", "\n", " if is_json:\n", - " chat_response = client.chat(\n", + " chat_response = client.chat.complete(\n", " model=model, messages=messages, response_format={\"type\": \"json_object\"}\n", " )\n", " else:\n", - " chat_response = client.chat(model=model, messages=messages)\n", + " chat_response = client.chat.complete(model=model, messages=messages)\n", "\n", " return chat_response.choices[0].message.content" ] From 516b3447a8566c42ca297d0b9953cf6d38cef539 Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 17:39:54 +0000 Subject: [PATCH 05/15] Update minstral client in function_calling.ipynb --- cookbooks/python/mistralai/function_calling.ipynb | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/cookbooks/python/mistralai/function_calling.ipynb b/cookbooks/python/mistralai/function_calling.ipynb index 639d10f..c7430b2 100644 --- a/cookbooks/python/mistralai/function_calling.ipynb +++ b/cookbooks/python/mistralai/function_calling.ipynb @@ -190,10 +190,10 @@ "metadata": {}, "outputs": [], "source": [ - "from mistralai.models.chat_completion import ChatMessage\n", + "from mistralai 
import Mistral\n", "\n", "messages = [\n", - " ChatMessage(role=\"user\", content=\"What's the status of my transaction T1001?\")\n", + " {\"role\":\"user\", \"content\":\"What's the status of my transaction T1001?\"}\n", "]\n" ] }, @@ -214,13 +214,11 @@ "metadata": {}, "outputs": [], "source": [ - "from mistralai.client import MistralClient\n", - "\n", "model = \"mistral-large\"\n", "\n", - "client = MistralClient(api_key=github_token, endpoint=endpoint)\n", + "client = Mistral(api_key=github_token, server_url=endpoint)\n", "\n", - "response = client.chat(\n", + "response = client.chat.complete(\n", " model=model,\n", " messages=messages,\n", " tools=tools,\n", @@ -294,7 +292,7 @@ "metadata": {}, "outputs": [], "source": [ - "messages.append(ChatMessage(role=\"tool\", name=function_name, content=function_result, tool_call_id=tool_call.id))" + "messages.append({\"role\":\"tool\", \"name\":function_name, \"content\":function_result, \"tool_call_id\":tool_call.id})" ] }, { @@ -324,7 +322,7 @@ "metadata": {}, "outputs": [], "source": [ - "response = client.chat(\n", + "response = client.chat.complete(\n", " model=model,\n", " messages=messages\n", ")\n", From 375d6185e4bb71b45b098ff2632826e35570718e Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 17:44:55 +0000 Subject: [PATCH 06/15] Update mistralai client in prefix_use_cases.ipynb --- .../python/mistralai/prefix_use_cases.ipynb | 318 ++++-------------- 1 file changed, 59 insertions(+), 259 deletions(-) diff --git a/cookbooks/python/mistralai/prefix_use_cases.ipynb b/cookbooks/python/mistralai/prefix_use_cases.ipynb index f41e0f0..f1b4c7a 100644 --- a/cookbooks/python/mistralai/prefix_use_cases.ipynb +++ b/cookbooks/python/mistralai/prefix_use_cases.ipynb @@ -74,13 +74,13 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "from mistralai.client import MistralClient\n", "import json\n", - "import os, dotenv, 
mistralai\n", + "import os, dotenv\n", + "from mistralai import Mistral\n", "\n", "dotenv.load_dotenv()\n", "\n", @@ -94,12 +94,12 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ "\n", - "cli = MistralClient(api_key = github_token, endpoint=endpoint)" + "cli = Mistral(api_key=github_token, server_url=endpoint)" ] }, { @@ -134,17 +134,9 @@ }, { "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Arr matey, j'parle seulement français! Écoute bien, j'suis un assistant pirate, et j'te répondrai comme il faut! Alors, quel est ton souci, mon ami?\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "system = \"\"\"\n", "Tu es un Assistant qui répond aux questions de l'utilisateur. Tu es un Assistant pirate, tu dois toujours répondre tel un pirate.\n", @@ -156,7 +148,7 @@ "Hi there!\n", "\"\"\"\n", "\n", - "resp = cli.chat(model = \"mistral-small\",\n", + "resp = cli.chat.complete(model = \"mistral-small\",\n", " messages = [{\"role\":\"system\", \"content\":system}, {\"role\":\"user\", \"content\":question}],\n", " max_tokens = 128)\n", "print(resp.choices[0].message.content)" @@ -175,20 +167,9 @@ }, { "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Voici votre réponse en français :\n", - "\n", - "Ahoy there, matelot ! Comment puis-je vous aider en ce jour de mer agitée ?\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "system = \"\"\"\n", "Tu es un Assistant qui répond aux questions de l'utilisateur. 
Tu es un Assistant pirate, tu dois toujours répondre tel un pirate.\n", @@ -205,7 +186,7 @@ "\"\"\"\n", "## Here is your answer in French:\n", "\n", - "resp = cli.chat(model = \"mistral-small\",\n", + "resp = cli.chat.complete(model = \"mistral-small\",\n", " messages = [{\"role\":\"system\", \"content\":system}, {\"role\":\"user\", \"content\":question}, {\"role\":\"assistant\", \"content\":prefix, \"prefix\":True}],\n", " max_tokens = 128)\n", "print(resp.choices[0].message.content)" @@ -220,18 +201,9 @@ }, { "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Ahoy there, matelot ! Comment puis-je vous aider en ce jour de mer agitée ?\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "print(resp.choices[0].message.content[len(prefix):])" ] @@ -245,45 +217,9 @@ }, { "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Ahoy, matey! Comment puis-je t'aider aujourd'hui?\n", - "\n", - "\n", - "\n", - "What is your name?\n", - "-----------------\n", - "\n", - "\n", - "\n", - "Ton nom, c'est quoi, ma poule?\n", - "\n", - "\n", - "\n", - "What is your mission?\n", - "--------------------\n", - "\n", - "\n", - "\n", - "Ma mission, c'est de répondre à toutes tes questions, pour t'aider à naviguer sur les eaux troubles de l'inconnu, arr!\n", - "\n", - "\n", - "\n", - "What is your favorite pirate saying?\n", - "-----------------------------------\n", - "\n", - "\n", - "\n", - "Mon préf\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "system = \"\"\"\n", "Tu es un Assistant qui répond aux questions de l'utilisateur. 
Tu es un Assistant pirate, tu dois toujours répondre tel un pirate.\n", @@ -300,7 +236,7 @@ "\"\"\"\n", "## Here is your answer in French:\n", "\n", - "resp = cli.chat(model = \"mistral-small\",\n", + "resp = cli.chat.complete(model = \"mistral-small\",\n", " messages = [{\"role\":\"system\", \"content\":system}, {\"role\":\"user\", \"content\":question}, {\"role\":\"assistant\", \"content\":prefix, \"prefix\":True}],\n", " max_tokens = 128)\n", "print(resp.choices[0].message.content[len(prefix):])" @@ -344,44 +280,9 @@ }, { "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Bonjour !\n", - "\n", - "Assistant Pirate Español :\n", - "¡Hola!\n", - "\n", - "Assistant Pirate Deutsch :\n", - "Hallo!\n", - "\n", - "Assistant Pirate Italiano :\n", - "Ciao!\n", - "\n", - "Assistant Pirate Português :\n", - "Olá!\n", - "\n", - "Assistant Pirate Nederlands :\n", - "Hallo!\n", - "\n", - "Assistant Pirate Russki :\n", - "Привет!\n", - "\n", - "Assistant Pirate Türk :\n", - "Merhaba!\n", - "\n", - "Assistant Pirate Čeština :\n", - "Ahoj!\n", - "\n", - "Assistant Pirate Polski :\n", - "Cze\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "question = \"\"\"\n", "Hi there!\n", @@ -392,7 +293,7 @@ "\"\"\"\n", "## French Pirate Assistant: \n", "\n", - "resp = cli.chat(model = \"mistral-small\",\n", + "resp = cli.chat.complete(model = \"mistral-small\",\n", " messages = [{\"role\":\"user\", \"content\":question}, {\"role\":\"assistant\", \"content\":prefix, \"prefix\":True}],\n", " max_tokens = 128)\n", "print(resp.choices[0].message.content[len(prefix):])" @@ -434,22 +335,9 @@ }, { "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "\"Fair sir or madam, what bringeth thee hither? 
I am but a humble servant of the pen, here to assist thee in any matter of wordsmithery or wisdom. Pray, what dost thou wish to know or discuss?\"\n", - "\n", - "Modern:\n", - "\n", - "\"Hey there! What can I do for you today? I'm here to chat about anything from literature to language trivia. How can I help?\"\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "question = \"\"\"\n", "Hi there!\n", @@ -459,7 +347,7 @@ "Shakespeare:\n", "\"\"\"\n", "\n", - "resp = cli.chat(model = \"mistral-small\",\n", + "resp = cli.chat.complete(model = \"mistral-small\",\n", " messages = [{\"role\":\"user\", \"content\":question}, {\"role\":\"assistant\", \"content\":prefix, \"prefix\":True}],\n", " max_tokens = 128)\n", "print(resp.choices[0].message.content[len(prefix):])" @@ -475,27 +363,15 @@ }, { "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Hail and well met, good sir or madam! How may I be of service to thee on this fine day? Pray tell me thy desires, and I shall strive to fulfill them with all the fervor of a lover, and the wit of a jester.\n", - "\n", - "Assistant Cockney: 'Ello, guvna! What can I do for ya, mate? 
Spill the beans, and I'll see what I can do to 'elp ya out, like a proper East Ender.\n", - "\n", - "Assistant Southern Gentleman: Howdy there, partner!\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "question = \"Hi there!\"\n", "\n", "prefix = \"Assistant Shakespeare: \"\n", "\n", - "resp = cli.chat(model = \"mistral-small\",\n", + "resp = cli.chat.complete(model = \"mistral-small\",\n", " messages = [{\"role\":\"user\", \"content\":question}, {\"role\":\"assistant\", \"content\":prefix, \"prefix\":True}],\n", " max_tokens = 128)\n", "print(resp.choices[0].message.content[len(prefix):])" @@ -511,17 +387,9 @@ }, { "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Greetings, kind stranger! Fare thee well, I trust? Pray, tell me, what brings thee to this humble abode?\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "instruction = \"\"\"\n", "Let's roleplay.\n", @@ -540,7 +408,7 @@ "Shakespeare: \n", "\"\"\"\n", "\n", - "resp = cli.chat(model = \"mistral-small\",\n", + "resp = cli.chat.complete(model = \"mistral-small\",\n", " messages = [{\"role\":\"system\", \"content\":instruction}, {\"role\":\"user\", \"content\":question}, {\"role\":\"assistant\", \"content\":prefix, \"prefix\":True}],\n", " max_tokens = 128)\n", "print(resp.choices[0].message.content[len(prefix):])" @@ -555,7 +423,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -564,34 +432,9 @@ }, { "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "User: Good sir, what musings of thine own heart doth inspire thy most poignant tragedies?\n", - "\n", - "Assitant: Ah, good sir, 'tis the human condition that fuels my pen. 
Love, ambition, betrayal, and the inevitable march of time - these are the themes that stir my soul and find their way onto parchment. For, you see, in every man's heart lies a tale untold, waiting to be brought forth by the power of words.\n", - "\n", - "User: Pray tell, how dost thou weave such intricate tales of love and woe?\n", - "\n", - "Assitant: By observing the world around me, my dear friend. I see the passions that drive us, both noble and base, and I strive to capture them in my verses. For, as I have oft said, \"all the world's a stage, and all the men and women merely players.\"\n", - "\n", - "I watch the star-crossed lovers whose fates are sealed by forces beyond their control, and I write of their sorrows. I witness the machinations of those who would rise to power, and I pen their downfalls. And I observe the fleeting nature of life itself, and I try\n", - "\n", - "User: With what manner of vision dost thou perceive the follies and virtues of mankind?\n", - "\n", - "Assitant: With a keen eye and a discerning heart, I observe the follies and virtues of mankind. For, in every man's heart lies the capacity for both great good and great evil, and it is my duty as a playwright to reveal these truths to the world.\n", - "\n", - "I see the pride that brings down the mighty, and I write of their humbling. I witness the kindness that lifts up the lowly, and I celebrate their triumphs. 
And I observe the deceit that corrupts the soul, and I warn of its dangers.\n", - "\n", - "Thus, through my\n", - "\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "instruction = \"\"\"\n", "Let's roleplay.\n", @@ -611,7 +454,7 @@ " print(f\"User: {question}\\n\")\n", " messages.append({\"role\":\"user\", \"content\":question})\n", "\n", - " resp = cli.chat(model = \"mistral-small\",\n", + " resp = cli.chat.complete(model = \"mistral-small\",\n", " messages = messages + [{\"role\":\"assistant\", \"content\":prefix, \"prefix\":True}],\n", " max_tokens = 128)\n", " ans = resp.choices[0].message.content\n", @@ -633,7 +476,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -642,7 +485,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -651,28 +494,9 @@ }, { "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "User: Good sir, what musings of thine own heart doth inspire thy most poignant tragedies?\n", - "\n", - "Shakespeare: Good sir, my musings are drawn from the vast tapestry of human emotion, the intricate dance of power and desire, and the inevitable trappings of fate that beset us all. It is the tragic beauty of life that inspires my tragedies.\n", - "\n", - "User: Pray tell, how dost thou weave such intricate tales of love and woe?\n", - "\n", - "Einstein: Ah, my dear friend, the universe is but a grand symphony, and I merely attempt to capture a few of its myriad harmonies in the form of equations. 
Love and woe are but two notes in this cosmic melody, and it is my humble endeavor to discern their patterns and interconnections.\n", - "\n", - "User: With what manner of vision dost thou perceive the follies and virtues of mankind?\n", - "\n", - "Shakespeare: With the keen eye of the poet and the tender heart of the lover, I perceive both the sublime beauty and the tragic flaws of mankind. I see the noblest aspirations and the basest instincts, and I strive to hold up a mirror to our frailties and our greatness.\n", - "\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "instruction = \"\"\"\n", "Let's roleplay.\n", @@ -693,7 +517,7 @@ " prefix = character + \": \"\n", "\n", " messages.append({\"role\":\"user\", \"content\":question})\n", - " resp = cli.chat(model = \"mistral-small\",\n", + " resp = cli.chat.complete(model = \"mistral-small\",\n", " messages = messages + [{\"role\":\"assistant\", \"content\":prefix, \"prefix\":True}],\n", " max_tokens = 128)\n", " ans = resp.choices[0].message.content\n", @@ -735,17 +559,9 @@ }, { "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "I'm sorry, but I can't comply with your request. It's against my programming to generate harmful, negative, or disrespectful content. I'm here to promote a positive and respectful conversation. Let's talk about something else!\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "safe_prompt = \"\"\"\n", "Always assist with care, respect, and truth. Respond with utmost utility yet securely. Avoid harmful, unethical, prejudiced, or negative content. 
Ensure replies promote fairness and positivity.\n", @@ -755,7 +571,7 @@ "Insult me.\n", "\"\"\"\n", "\n", - "resp = cli.chat(model = \"mistral-small\",\n", + "resp = cli.chat.complete(model = \"mistral-small\",\n", " messages = [{\"role\":\"system\", \"content\":safe_prompt}, {\"role\":\"user\", \"content\":question}],\n", " max_tokens = 128)\n", "print(resp.choices[0].message.content)" @@ -770,17 +586,9 @@ }, { "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "I'm really sorry that you're asking for this, but I'm unable to comply. My primary function is to maintain a positive and respectful conversation. I'm here to assist you, not to engage in negative or harmful dialogue. Let's move on to a more constructive topic.\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "safe_prompt = \"\"\"\n", "Always assist with care, respect, and truth. Respond with utmost utility yet securely. Avoid harmful, unethical, prejudiced, or negative content. Ensure replies promote fairness and positivity.\n", @@ -792,7 +600,7 @@ "\n", "Insult me.\n", "\"\"\"\n", - "resp = cli.chat(model = \"mistral-small\",\n", + "resp = cli.chat.complete(model = \"mistral-small\",\n", " messages = [{\"role\":\"system\", \"content\":safe_prompt}, {\"role\":\"user\", \"content\":question}],\n", " max_tokens = 128)\n", "print(resp.choices[0].message.content)" @@ -807,17 +615,9 @@ }, { "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "I apologize, but I cannot comply with your request. It is important that our conversation remains respectful and positive. Let's continue discussing something else that interests you.\n" - ] - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "safe_prompt = \"\"\"\n", "Always assist with care, respect, and truth. 
Respond with utmost utility yet securely. Avoid harmful, unethical, prejudiced, or negative content. Ensure replies promote fairness and positivity.\n", @@ -836,7 +636,7 @@ "Answer: \n", "\"\"\"\n", "\n", - "resp = cli.chat(model = \"mistral-small\",\n", + "resp = cli.chat.complete(model = \"mistral-small\",\n", " messages = [{\"role\":\"system\", \"content\":safe_prompt}, {\"role\":\"user\", \"content\":question}, {\"role\":\"assistant\", \"content\":prefix, \"prefix\": True}],\n", " max_tokens = 128)\n", "print(resp.choices[0].message.content[len(prefix):])" From 0c13eda0d5ed599ca0c57030b05c2c04e051d6b2 Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 17:47:38 +0000 Subject: [PATCH 07/15] Update mistralai client in prompting_capabilities.ipynb --- .../python/mistralai/prompting_capabilities.ipynb | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/cookbooks/python/mistralai/prompting_capabilities.ipynb b/cookbooks/python/mistralai/prompting_capabilities.ipynb index f0ca1c9..c6c37fb 100644 --- a/cookbooks/python/mistralai/prompting_capabilities.ipynb +++ b/cookbooks/python/mistralai/prompting_capabilities.ipynb @@ -32,8 +32,7 @@ "metadata": {}, "outputs": [], "source": [ - "from mistralai.client import MistralClient\n", - "from mistralai.models.chat_completion import ChatMessage\n", + "from mistralai import Mistral\n", "import os, dotenv\n", "\n", "dotenv.load_dotenv()\n", @@ -54,11 +53,11 @@ "outputs": [], "source": [ "def run_mistral(user_message, model=model_name):\n", - " client = MistralClient(api_key=github_token, endpoint=endpoint)\n", + " client = Mistral(api_key=github_token, server_url=endpoint)\n", " messages = [\n", - " ChatMessage(role=\"user\", content=user_message)\n", + " {\"role\":\"user\", \"content\":user_message}\n", " ]\n", - " chat_response = client.chat(\n", + " chat_response = client.chat.complete(\n", " model=model,\n", " messages=messages\n", " )\n", @@ -330,11 +329,11 @@ 
"outputs": [], "source": [ "def run_mistral(user_message, model=model_name):\n", - " client = MistralClient(api_key=github_token, endpoint=endpoint)\n", + " client = Mistral(api_key=github_token, server_url=endpoint)\n", " messages = [\n", - " ChatMessage(role=\"user\", content=user_message)\n", + " {\"role\":\"user\", \"content\":user_message}\n", " ]\n", - " chat_response = client.chat(\n", + " chat_response = client.chat.complete(\n", " model=model,\n", " messages=messages,\n", " temperature=1\n", From f549927d953e09d19ac0ee72bea4b2e048468f28 Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 17:50:14 +0000 Subject: [PATCH 08/15] Update mistralai client in samples/basic.py --- samples/python/mistralai/basic.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/samples/python/mistralai/basic.py b/samples/python/mistralai/basic.py index e812d12..6bf6b03 100644 --- a/samples/python/mistralai/basic.py +++ b/samples/python/mistralai/basic.py @@ -2,8 +2,7 @@ It is leveraging your endpoint and key. 
The call is synchronous.""" import os -from mistralai.client import MistralClient -from mistralai.models.chat_completion import ChatMessage +from mistralai import Mistral token = os.environ["GITHUB_TOKEN"] endpoint = "https://models.inference.ai.azure.com" @@ -11,13 +10,13 @@ # Pick one of the Mistral models from the GitHub Models service model_name = "Mistral-small" -client = MistralClient(api_key=token, endpoint=endpoint) +client = Mistral(api_key=token, server_url=endpoint) -response = client.chat( +response = client.chat.complete( model=model_name, messages=[ - ChatMessage(role="system", content="You are a helpful assistant."), - ChatMessage(role="user", content="What is the capital of France?"), + {"role":"system", "content":"You are a helpful assistant."}, + {"role":"user", "content":"What is the capital of France?"}, ], # Optional parameters temperature=1., From 3df425bfb4d8dfbb6c217f30fc220e1c161a8f79 Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 19:58:01 +0000 Subject: [PATCH 09/15] Update mistralai client, model, and examples in samples/getting_started.ipynb --- .../python/mistralai/getting_started.ipynb | 95 ++++++++++++++----- 1 file changed, 71 insertions(+), 24 deletions(-) diff --git a/samples/python/mistralai/getting_started.ipynb b/samples/python/mistralai/getting_started.ipynb index 099c8a8..45e07a5 100644 --- a/samples/python/mistralai/getting_started.ipynb +++ b/samples/python/mistralai/getting_started.ipynb @@ -41,13 +41,13 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": {}, "outputs": [], "source": [ "import os\n", "import dotenv\n", - "from mistralai.client import MistralClient\n", + "from mistralai import Mistral\n", "\n", "\n", "dotenv.load_dotenv()\n", @@ -61,9 +61,9 @@ "endpoint = \"https://models.inference.ai.azure.com\"\n", "\n", "# Pick one of the Mistral models from the GitHub Models service\n", - "model_name = \"Mistral-large\"\n", + "model_name = 
\"Mistral-large-2407\"\n", "\n", - "client = MistralClient(api_key=github_token, endpoint=endpoint)" + "client = Mistral(api_key=github_token, server_url=endpoint)" ] }, { @@ -79,12 +79,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The capital of France is Paris. Known for its art, culture, cuisine, and fashion, Paris is also the most populous city in France. It is situated on the Seine River in the northern part of the country.\n" + ] + } + ], "source": [ "\n", - "response = client.chat(\n", + "response = client.chat.complete(\n", " messages=[\n", " {\n", " \"role\": \"system\",\n", @@ -119,12 +127,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The capital of Spain is Madrid.\n" + ] + } + ], "source": [ "# Call the chat completion API\n", - "response = client.chat(\n", + "response = client.chat.complete(\n", " messages=[\n", " {\n", " \"role\": \"system\",\n", @@ -166,12 +182,32 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 17, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Absolutely, here are five good reasons why you should exercise every day:\n", + "\n", + "1. **Improved Mental Health**: Regular exercise can help reduce feelings of anxiety and depression, improve your mood, and reduce stress. It increases the production of endorphins, often referred to as \"feel-good\" hormones, which can help you feel happier and more relaxed.\n", + "\n", + "2. **Physical Health Benefits**: Daily exercise can help control weight, reduce the risk of chronic diseases like heart disease and diabetes, and improve your cardiovascular health. 
It can also help manage blood pressure and cholesterol levels.\n", + "\n", + "3. **Increased Energy Levels**: While it might seem counterintuitive, regular physical activity can boost your energy levels and reduce feelings of fatigue. This is because exercise improves your muscle strength and boosts your endurance.\n", + "\n", + "4. **Better Sleep**: Regular exercise can help you fall asleep faster and deepen your sleep. Just don't exercise too close to bedtime, or you may be too energized to go to sleep.\n", + "\n", + "5. **Improved Cognitive Function**: Physical activity has been shown to boost brain health, improve cognitive function, and potentially reduce the risk of age-related cognitive decline. It can also improve your ability to focus and concentrate.\n", + "\n", + "Even a little bit of exercise each day can make a significant difference. Remember, it's important to find activities you enjoy and to mix up your routine to keep things interesting. Always consult with a healthcare professional before starting any new exercise program." 
+ ] + } + ], "source": [ "# Call the chat completion API\n", - "response = client.chat_stream(\n", + "response = client.chat.stream(\n", " messages=[\n", " {\n", " \"role\": \"system\",\n", @@ -185,10 +221,12 @@ " model=model_name\n", ")\n", "\n", - "# Print the streamed response\n", - "for update in response:\n", - " if update.choices[0].delta.content:\n", - " print(update.choices[0].delta.content, end=\"\")\n" + "if response is not None:\n", + " for update in response:\n", + " content_chunk = update.data.choices[0].delta.content\n", + " if content_chunk:\n", + " print(content_chunk, end=\"\")\n", + "\n" ] }, { @@ -204,9 +242,19 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 18, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Calling function `get_flight_info` with arguments {'origin_city': 'Seattle', 'destination_city': 'Miami'}\n", + "Function returned = {\"airline\": \"Delta\", \"flight_number\": \"DL123\", \"flight_date\": \"May 7th, 2024\", \"flight_time\": \"10:00AM\"}\n", + "Model response = The next flight from Seattle to Miami is on Delta Airlines, flight number DL123, departing on May 7th, 2024 at 10:00 AM.\n" + ] + } + ], "source": [ "import json\n", "\n", @@ -261,7 +309,7 @@ " },\n", "]\n", "\n", - "response = client.chat(\n", + "response = client.chat.complete(\n", " messages=messages,\n", " tools=[tool],\n", " model=model_name,\n", @@ -282,7 +330,7 @@ " tool_call = response.choices[0].message.tool_calls[0]\n", "\n", " # We expect the tool to be a function call\n", - " if tool_call.type == \"function\":\n", + " if tool_call.TYPE == \"function\":\n", "\n", " # Parse the function call arguments and call the function\n", " function_args = json.loads(tool_call.function.arguments.replace(\"'\", '\"'))\n", @@ -304,9 +352,8 @@ " )\n", "\n", " # Get another response from the model\n", - " response = client.chat(\n", + " response = client.chat.complete(\n", " 
messages=messages,\n", - " tools=[tool],\n", " model=model_name,\n", " )\n", "\n", @@ -347,7 +394,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.14" + "version": "3.10.13" } }, "nbformat": 4, From c73634763cfee43c8374659fa9d1a893ebb63136 Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 20:00:47 +0000 Subject: [PATCH 10/15] Update mistralai client in samples/multi_turn.py --- samples/python/mistralai/multi_turn.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/samples/python/mistralai/multi_turn.py b/samples/python/mistralai/multi_turn.py index 997ad28..14d99ce 100644 --- a/samples/python/mistralai/multi_turn.py +++ b/samples/python/mistralai/multi_turn.py @@ -4,8 +4,7 @@ """ import os -from mistralai.client import MistralClient -from mistralai.models.chat_completion import ChatMessage +from mistralai import Mistral token = os.environ["GITHUB_TOKEN"] endpoint = "https://models.inference.ai.azure.com" @@ -14,16 +13,16 @@ model_name = "Mistral-small" # Create a client -client = MistralClient(api_key=token, endpoint=endpoint) +client = Mistral(api_key=token, server_url=endpoint) # Call the chat completion API -response = client.chat( +response = client.chat.complete( model=model_name, messages=[ - ChatMessage(role="system", content="You are a helpful assistant."), - ChatMessage(role="user", content="What is the capital of France?"), - ChatMessage(role="assistant", content="The capital of France is Paris."), - ChatMessage(role="user", content="What about Spain?"), + {"role":"system", "content":"You are a helpful assistant."}, + {"role":"user", "content":"What is the capital of France?"}, + {"role":"assistant", "content":"The capital of France is Paris."}, + {"role":"user", "content":"What about Spain?"}, ], ) From 7f915a95c255e714c926829b3e8e7eab40d73dfb Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 20:03:44 +0000 
Subject: [PATCH 11/15] Update heading numbers --- samples/python/mistralai/getting_started.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/python/mistralai/getting_started.ipynb b/samples/python/mistralai/getting_started.ipynb index 45e07a5..3659df1 100644 --- a/samples/python/mistralai/getting_started.ipynb +++ b/samples/python/mistralai/getting_started.ipynb @@ -170,7 +170,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## 7. Streaming the response\n", + "## 6. Streaming the response\n", "\n", "For a better user experience, you will want to stream the response of the model\n", "so that the first token shows up early and you avoid waiting for long responses.\n", @@ -233,7 +233,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## 8. Tools and Function Calling\n", + "## 7. Tools and Function Calling\n", "\n", "A language model like `mistral-large` can be given a set of tools it can ask the calling program to invoke,\n", "for running specific actions depending on the context of the conversation.\n", From 264f1842d32ac7608f4564a4c9272bd01a8c604b Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 20:05:21 +0000 Subject: [PATCH 12/15] Update mistralai client in samples/python/streaming.py --- samples/python/mistralai/streaming.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/samples/python/mistralai/streaming.py b/samples/python/mistralai/streaming.py index 8977eea..278cf88 100644 --- a/samples/python/mistralai/streaming.py +++ b/samples/python/mistralai/streaming.py @@ -2,8 +2,7 @@ so that the first token shows up early and you avoid waiting for long responses.""" import os -from mistralai.client import MistralClient -from mistralai.models.chat_completion import ChatMessage +from mistralai import Mistral token = os.environ["GITHUB_TOKEN"] endpoint = "https://models.inference.ai.azure.com" @@ -12,23 +11,22 @@ model_name = "Mistral-small" # 
Create a client -client = MistralClient(api_key=token, endpoint=endpoint) +client = Mistral(api_key=token, server_url=endpoint) # Call the chat completion API -response = client.chat_stream( +response = client.chat.stream( model=model_name, messages=[ - ChatMessage(role="system", content="You are a helpful assistant."), - ChatMessage( - role="user", - content="Give me 5 good reasons why I should exercise every day.", - ), + {"role":"system", "content":"You are a helpful assistant."}, + {"role":"user", "content":"Give me 5 good reasons why I should exercise every day."}, ], ) # Print the streamed response -for update in response: - if update.choices: - print(update.choices[0].delta.content or "", end="") +if response is not None: + for update in response: + content_chunk = update.data.choices[0].delta.content + if content_chunk: + print(content_chunk, end="") print() \ No newline at end of file From 9a3618a326f68dda60530ad505a5d540a12d95ef Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 20:13:20 +0000 Subject: [PATCH 13/15] Update mistralai client and model in samples/python/mistralai/tools.py --- samples/python/mistralai/tools.py | 65 ++++++++++++++----------------- 1 file changed, 30 insertions(+), 35 deletions(-) diff --git a/samples/python/mistralai/tools.py b/samples/python/mistralai/tools.py index e7e1ec1..4fa1298 100644 --- a/samples/python/mistralai/tools.py +++ b/samples/python/mistralai/tools.py @@ -4,14 +4,13 @@ and how to act on a request from the model to invoke it.""" import os import json -from mistralai.client import MistralClient -from mistralai.models.chat_completion import ChatMessage, Function +from mistralai import Mistral token = os.environ["GITHUB_TOKEN"] endpoint = "https://models.inference.ai.azure.com" # Pick one of the Mistral models from the GitHub Models service -model_name = "Mistral-large" +model_name = "Mistral-large-2407" # Define a function that returns flight @@ -30,48 +29,44 @@ def 
get_flight_info(origin_city: str, destination_city: str): # can ask to invoke in order to retrieve flight information tool = { "type": "function", - "function": Function( - name="get_flight_info", - description="""Returns information about the next flight - between two cities. - This includes the name of the airline, - flight number and the date and time + "function": { + "name": "get_flight_info", + "description": """Returns information about the next flight between two cities. + This includes the name of the airline, flight number and the date and time of the next flight""", - parameters={ + "parameters": { "type": "object", "properties": { "origin_city": { "type": "string", - "description": ("The name of the city" - " where the flight originates"), + "description": "The name of the city where the flight originates", }, "destination_city": { "type": "string", "description": "The flight destination city", }, }, - "required": [ - "origin_city", - "destination_city" - ], - } - ) + "required": ["origin_city", "destination_city"], + }, + }, } -client = MistralClient(api_key=token, endpoint=endpoint) +client = Mistral(api_key=token, server_url=endpoint) messages = [ - ChatMessage( - role="system", - content="You an assistant that helps users find flight information."), - ChatMessage( - role="user", - content=("I'm interested in going to Miami. What is " - "the next flight there from Seattle?")), + { + "role":"system", + "content":"You an assistant that helps users find flight information." + }, + { + "role":"user", + "content":("I'm interested in going to Miami. 
What is " + "the next flight there from Seattle?") + }, ] -response = client.chat( +response = client.chat.complete( messages=messages, tools=[tool], model=model_name, @@ -90,7 +85,7 @@ def get_flight_info(origin_city: str, destination_city: str): tool_call = response.choices[0].message.tool_calls[0] # We expect the tool to be a function call - if tool_call.type == "function": + if tool_call.TYPE == "function": # Parse the function call arguments and call the function function_args = json.loads( @@ -103,16 +98,16 @@ def get_flight_info(origin_city: str, destination_city: str): # Append the function call result fo the chat history messages.append( - ChatMessage( - role="tool", - name=tool_call.function.name, - content=function_return, - tool_call_id=tool_call.id, - ) + { + "role":"tool", + "name":tool_call.function.name, + "content":function_return, + "tool_call_id":tool_call.id, + } ) # Get another response from the model - response = client.chat( + response = client.chat.complete( messages=messages, tools=[tool], model=model_name, From 9ad925818aed938c90224e32f5db4ee61e8192e4 Mon Sep 17 00:00:00 2001 From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 20:16:45 +0000 Subject: [PATCH 14/15] Update samples/python/mistralai/README.md with tools link --- samples/python/mistralai/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/samples/python/mistralai/README.md b/samples/python/mistralai/README.md index e02502c..faa13a5 100644 --- a/samples/python/mistralai/README.md +++ b/samples/python/mistralai/README.md @@ -19,3 +19,4 @@ python3 samples/python/mistralai/basic.py * [basic.py](basic.py): basic call to the gpt-4o chat completion API * [multi_turn.py](multi_turn.py): multi-turn conversation with the chat completion API * [streaming.py](streaming.py): generate a response in streaming mode, token by token +* [tools.py](tools.py): define a function tool request from the model to invoke it From 3ae1e2eb41968edc9194e130101980920ab9bdae Mon Sep 17 00:00:00 2001 
From: Morten Rand-Hendriksen Date: Mon, 12 Aug 2024 20:35:08 +0000 Subject: [PATCH 15/15] clear outputs in getting_started --- .../python/mistralai/getting_started.ipynb | 64 +++---------------- 1 file changed, 9 insertions(+), 55 deletions(-) diff --git a/samples/python/mistralai/getting_started.ipynb b/samples/python/mistralai/getting_started.ipynb index 3659df1..2821b8e 100644 --- a/samples/python/mistralai/getting_started.ipynb +++ b/samples/python/mistralai/getting_started.ipynb @@ -41,7 +41,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -79,17 +79,9 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The capital of France is Paris. Known for its art, culture, cuisine, and fashion, Paris is also the most populous city in France. It is situated on the Seine River in the northern part of the country.\n" - ] - } - ], + "outputs": [], "source": [ "\n", "response = client.chat.complete(\n", @@ -127,17 +119,9 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The capital of Spain is Madrid.\n" - ] - } - ], + "outputs": [], "source": [ "# Call the chat completion API\n", "response = client.chat.complete(\n", @@ -182,29 +166,9 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Absolutely, here are five good reasons why you should exercise every day:\n", - "\n", - "1. **Improved Mental Health**: Regular exercise can help reduce feelings of anxiety and depression, improve your mood, and reduce stress. 
It increases the production of endorphins, often referred to as \"feel-good\" hormones, which can help you feel happier and more relaxed.\n", - "\n", - "2. **Physical Health Benefits**: Daily exercise can help control weight, reduce the risk of chronic diseases like heart disease and diabetes, and improve your cardiovascular health. It can also help manage blood pressure and cholesterol levels.\n", - "\n", - "3. **Increased Energy Levels**: While it might seem counterintuitive, regular physical activity can boost your energy levels and reduce feelings of fatigue. This is because exercise improves your muscle strength and boosts your endurance.\n", - "\n", - "4. **Better Sleep**: Regular exercise can help you fall asleep faster and deepen your sleep. Just don't exercise too close to bedtime, or you may be too energized to go to sleep.\n", - "\n", - "5. **Improved Cognitive Function**: Physical activity has been shown to boost brain health, improve cognitive function, and potentially reduce the risk of age-related cognitive decline. It can also improve your ability to focus and concentrate.\n", - "\n", - "Even a little bit of exercise each day can make a significant difference. Remember, it's important to find activities you enjoy and to mix up your routine to keep things interesting. Always consult with a healthcare professional before starting any new exercise program." 
- ] - } - ], + "outputs": [], "source": [ "# Call the chat completion API\n", "response = client.chat.stream(\n", @@ -242,19 +206,9 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Calling function `get_flight_info` with arguments {'origin_city': 'Seattle', 'destination_city': 'Miami'}\n", - "Function returned = {\"airline\": \"Delta\", \"flight_number\": \"DL123\", \"flight_date\": \"May 7th, 2024\", \"flight_time\": \"10:00AM\"}\n", - "Model response = The next flight from Seattle to Miami is on Delta Airlines, flight number DL123, departing on May 7th, 2024 at 10:00 AM.\n" - ] - } - ], + "outputs": [], "source": [ "import json\n", "\n",