Simple Bot with Weather Tool and Error Handling

The example below is similar to the Simple Bot with Weather Tool example, but here we add on_error for error handling in case something goes wrong when calling the weather function. We simply inject the error back into the LLM so it can figure out by itself what is missing, which in the example below is the location field, and ask the user for more input.
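
Before diving into the full example, here is the on_error pattern in isolation, as a minimal sketch (the BrittleStream and FallbackStream names and the always_fails function are hypothetical, for illustration only):

from langstream import Stream

def always_fails(user_input: str) -> str:
    # Simulate a step that blows up, like the weather function call below
    raise ValueError(f"could not handle {user_input!r}")

# A fallback stream that receives the exception and produces a recovery message
fallback_stream = Stream[Exception, str](
    "FallbackStream",
    lambda err: f"Something went wrong: {err}",
)

brittle_stream = Stream[str, str]("BrittleStream", always_fails).on_error(
    # The handler takes the exception and returns another stream's output
    lambda err: fallback_stream(err)
)

In the full example, the handler does exactly this, but calls a new LLM stream with the error message instead of a fixed string.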

import json
from typing import Any, AsyncGenerator, List, Literal, Tuple, TypedDict
from langstream import debug, as_async_generator

from langstream.contrib.llms.open_ai import (
    OpenAIChatStream,
    OpenAIChatDelta,
    OpenAIChatMessage,
)
from langstream.core.stream import Stream, StreamOutput


class Memory(TypedDict):
    history: List[OpenAIChatMessage]


memory = Memory(history=[])


def save_message_to_memory(message: OpenAIChatMessage) -> OpenAIChatMessage:
    memory["history"].append(message)
    return message


def update_delta_on_memory(delta: OpenAIChatDelta) -> OpenAIChatDelta:
    if not isinstance(delta, OpenAIChatDelta):
        return delta

    # Start a new message in memory when the role changes, otherwise
    # append the streamed content to the last message
    if memory["history"][-1].role != delta.role and delta.role is not None:
        memory["history"].append(
            OpenAIChatMessage(role=delta.role, content=delta.content, name=delta.name)
        )
    else:
        memory["history"][-1].content += delta.content
    return delta


def get_current_weather(
    location: str, format: Literal["celsius", "fahrenheit"] = "celsius"
) -> OpenAIChatDelta:
    result = {
        "location": location,
        "forecast": "sunny",
        "temperature": "25 C" if format == "celsius" else "77 F",
    }

    return OpenAIChatDelta(
        role="function", name="get_current_weather", content=json.dumps(result)
    )


def error_handler(
    err: Exception,
) -> AsyncGenerator[StreamOutput[OpenAIChatDelta], Any]:
    # Try to recover from the error if it happened on the function calling
    if "get_current_weather" in str(err):
        return function_error_stream(("get_current_weather", err))
    else:
        # Otherwise just re-raise it
        raise err


# Stream Definitions

weather_stream = debug(
    OpenAIChatStream[str, OpenAIChatDelta](
        "WeatherStream",
        lambda user_input: [
            OpenAIChatMessage(
                role="system",
                content="You are a chatbot that has access to real-time weather information",
            ),
            *memory["history"],
            save_message_to_memory(
                OpenAIChatMessage(role="user", content=user_input),
            ),
        ],
        model="gpt-3.5-turbo-0613",
        functions=[
            {
                "name": "get_current_weather",
                "description": "Gets the current weather in a given location, use this function for any questions related to the weather",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "description": "The city to get the weather for, e.g. San Francisco. Guess the location from user messages",
                            "type": "string",
                        },
                        "format": {
                            "description": "The temperature unit to use, either celsius or fahrenheit",
                            "type": "string",
                            "enum": ["celsius", "fahrenheit"],
                        },
                    },
                    # We comment this out so the model can send an empty location by mistake
                    # "required": ["location"],
                },
            }
        ],
        temperature=0,
    )
    .map(
        # We store the function call that the LLM made with its arguments in memory, so it can inspect it back later on
        lambda delta: save_message_to_memory(
            OpenAIChatMessage(
                role="function",
                name="get_current_weather",
                content=delta.content,
            )
        )
        # Then we call the function
        and get_current_weather(**json.loads(delta.content))
        # If it was called of course
        if delta.role == "function" and delta.name == "get_current_weather"
        else delta
    )
    .on_error(error_handler)
    .map(update_delta_on_memory)
)

function_reply_stream = debug(
    OpenAIChatStream[None, OpenAIChatDelta](
        "FunctionReplyStream",
        lambda _: memory["history"],
        model="gpt-3.5-turbo-0613",
        temperature=0,
    ).map(update_delta_on_memory)
)

# If an error happens, this stream is triggered: it simply takes the current history plus a user message carrying the error message.
# This is enough for the model to figure out what the issue was and ask the user for additional input
function_error_stream = OpenAIChatStream[Tuple[str, Exception], OpenAIChatDelta](
    "FunctionErrorStream",
    lambda name_and_err: [
        *memory["history"],
        save_message_to_memory(
            OpenAIChatMessage(
                role="user",
                content=str(name_and_err[1]),
            ),
        ),
    ],
    model="gpt-3.5-turbo-0613",
    temperature=0,
)

weather_bot: Stream[str, OpenAIChatDelta] = weather_stream.and_then(
    # Reply based on the function result if the last output was a function output
    lambda outputs: function_reply_stream(None)
    if list(outputs)[-1].role == "function"
    # Otherwise just re-yield the outputs
    else as_async_generator(*outputs)
)
from langstream.utils.stream import collect_final_output

_ = await collect_final_output(weather_bot("hi there"))

> WeatherStream

Assistant: Hello! How can I assist you today?
from langstream.utils.stream import collect_final_output

_ = await collect_final_output(weather_bot("it is hot today?"))

> WeatherStream

Function get_current_weather: {}

> WeatherStream@map

Exception: get_current_weather() missing 1 required positional argument: 'location'

> FunctionErrorStream

Assistant: I apologize for the inconvenience. In order to provide you with the current weather, could you please provide me with your location?
_ = await collect_final_output(weather_bot("I am in Amsterdam"))

> WeatherStream

Function get_current_weather: {
  "location": "Amsterdam"
}

> WeatherStream@map

Function get_current_weather: {"location": "Amsterdam", "forecast": "sunny", "temperature": "25 C"}

> FunctionReplyStream

Assistant: It seems that the current weather in Amsterdam is sunny with a temperature of 25°C. Stay hydrated and enjoy the day!

As you can see, the bot first tried to call get_current_weather with empty arguments, which threw an error. We inject this error back through the FunctionErrorStream, making the bot realize the mistake and ask the user to provide the location. Once provided, the function call is triggered again, this time with the right location, and the bot replies with the result.
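
If you want to chat with the bot interactively, a minimal loop could look like the sketch below (the chat helper and the exit command are illustrative additions, assuming collect_final_output returns the list of final deltas, as used above):

import asyncio

from langstream.utils.stream import collect_final_output

async def chat():
    while True:
        user_input = input("> ")
        if user_input in ("exit", "quit"):
            break
        # debug() already prints the streamed outputs as they arrive;
        # here we also join the final deltas into a single reply
        outputs = await collect_final_output(weather_bot(user_input))
        print("".join(delta.content for delta in outputs))

# asyncio.run(chat())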

Now take a look at what happened inside the memory: we save both the original function call and the error message there:

memory["history"]

[OpenAIChatMessage(role='user', content='hi there', name=None),
 OpenAIChatMessage(role='assistant', content='Hello! How can I assist you today?', name=None),
 OpenAIChatMessage(role='user', content='it is hot today?', name=None),
 OpenAIChatMessage(role='function', content='{}', name='get_current_weather'),
 OpenAIChatMessage(role='user', content="get_current_weather() missing 1 required positional argument: 'location'", name=None),
 OpenAIChatMessage(role='assistant', content='I apologize for the inconvenience. In order to provide you with the current weather, could you please provide me with your location?', name=None),
 OpenAIChatMessage(role='user', content='I am in Amsterdam', name=None),
 OpenAIChatMessage(role='function', content='{\n "location": "Amsterdam"\n}{"location": "Amsterdam", "forecast": "sunny", "temperature": "25 C"}', name='get_current_weather'),
 OpenAIChatMessage(role='assistant', content='It seems that the current weather in Amsterdam is sunny with a temperature of 25°C. Stay hydrated and enjoy the day!', name=None)]
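
One detail to notice: the function call arguments and the function result ended up concatenated in the same function message. That happens because the result delta returned by get_current_weather also has role 'function', so update_delta_on_memory appends its content to the previous message instead of starting a new one.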

That's it! If you have any questions about this example, join our Discord community and we can help you out.

Also, if you are interested in running a bot like this inside a nice UI, check out our docs on Chainlit.