From 216185f9bc05a4c05b7b05a6ab736a93c901c020 Mon Sep 17 00:00:00 2001
From: David Zhao
Date: Fri, 31 Jan 2025 15:16:09 -0800
Subject: [PATCH] Update examples (#1432)

---
 .../function_calling_weather.py            | 31 ++++++++++++-------
 .../voice-pipeline-agent/turn_detector.py  |  2 +-
 2 files changed, 21 insertions(+), 12 deletions(-)

diff --git a/examples/voice-pipeline-agent/function_calling_weather.py b/examples/voice-pipeline-agent/function_calling_weather.py
index f39705f17e..3f695aac5f 100644
--- a/examples/voice-pipeline-agent/function_calling_weather.py
+++ b/examples/voice-pipeline-agent/function_calling_weather.py
@@ -1,7 +1,5 @@
 import logging
 import random
-import re
-import urllib
 from typing import Annotated
 
 import aiohttp
@@ -34,10 +32,19 @@ async def get_weather(
         location: Annotated[
             str, llm.TypeInfo(description="The location to get the weather for")
         ],
+        latitude: Annotated[
+            str,
+            llm.TypeInfo(description="The latitude of location to get the weather for"),
+        ],
+        longitude: Annotated[
+            str,
+            llm.TypeInfo(
+                description="The longitude of location to get the weather for"
+            ),
+        ],
     ):
-        """Called when the user asks about the weather. This function will return the weather for the given location."""
-        # Clean the location string of special characters
-        location = re.sub(r"[^a-zA-Z0-9]+", " ", location).strip()
+        """Called when the user asks about the weather. This function will return the weather for the given location.
+        When given a location, please estimate the latitude and longitude of the location and do not ask the user for them."""
 
         # When a function call is running, there are a couple of options to inform the user
         # that it might take awhile:
@@ -63,16 +70,18 @@ async def get_weather(
         # of the chat context of the function call for answer synthesis
         speech_handle = await agent.say(message, add_to_chat_ctx=True)  # noqa: F841
 
-        logger.info(f"getting weather for {location}")
-        url = f"https://wttr.in/{urllib.parse.quote(location)}?format=%C+%t"
-        weather_data = ""
+        logger.info(f"getting weather for {latitude}, {longitude}")
+        url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m"
+        weather_data = {}
         async with aiohttp.ClientSession() as session:
             async with session.get(url) as response:
                 if response.status == 200:
+                    data = await response.json()
                     # response from the function call is returned to the LLM
-                    weather_data = (
-                        f"The weather in {location} is {await response.text()}."
-                    )
+                    weather_data = {
+                        "temperature": data["current"]["temperature_2m"],
+                        "temperature_unit": "Celsius",
+                    }
                     logger.info(f"weather data: {weather_data}")
                 else:
                     raise Exception(
diff --git a/examples/voice-pipeline-agent/turn_detector.py b/examples/voice-pipeline-agent/turn_detector.py
index 898ac9cc3d..9b40112825 100644
--- a/examples/voice-pipeline-agent/turn_detector.py
+++ b/examples/voice-pipeline-agent/turn_detector.py
@@ -43,7 +43,7 @@ async def entrypoint(ctx: JobContext):
     agent = VoicePipelineAgent(
         vad=ctx.proc.userdata["vad"],
         stt=deepgram.STT(),
-        llm=openai.LLM(),
+        llm=openai.LLM(model="gpt-4o-mini"),
         tts=openai.TTS(),
         chat_ctx=initial_ctx,
         turn_detector=turn_detector.EOUModel(),
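
For reference, here is a minimal standalone sketch of the Open-Meteo request the updated example now makes, using the same endpoint, query parameters, and response fields as the patched `get_weather` tool. The `fetch_current_temperature` helper name and the sample coordinates are illustrative only and are not part of the patch.

```python
import asyncio

import aiohttp


async def fetch_current_temperature(latitude: str, longitude: str) -> dict:
    # Same endpoint and query string as the updated example: Open-Meteo returns
    # the current 2m air temperature (Celsius) for the given coordinates.
    url = (
        "https://api.open-meteo.com/v1/forecast"
        f"?latitude={latitude}&longitude={longitude}&current=temperature_2m"
    )
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            if response.status != 200:
                raise Exception(
                    f"Failed to get weather data, status code: {response.status}"
                )
            data = await response.json()
            # Shape mirrors the dict the example returns to the LLM.
            return {
                "temperature": data["current"]["temperature_2m"],
                "temperature_unit": "Celsius",
            }


if __name__ == "__main__":
    # Illustrative coordinates only; in the example, latitude and longitude are
    # estimated by the LLM from the user's requested location.
    print(asyncio.run(fetch_current_temperature("37.77", "-122.42")))
```

Having the LLM supply estimated latitude and longitude avoids geocoding or sanitizing the free-form location string, which is why the `re` and `urllib` imports were dropped from the example.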