
Simple Bot with Weather Tool

Below is a code example of a bot you can talk to. It can check the weather, it has memory, it uses OpenAI functions, and it streams its outputs:

import json
from typing import List, Literal, TypedDict
from langstream import Stream, debug, as_async_generator

from langstream.contrib.llms.open_ai import (
    OpenAIChatStream,
    OpenAIChatDelta,
    OpenAIChatMessage,
)


class Memory(TypedDict):
    history: List[OpenAIChatMessage]


memory = Memory(history=[])


def save_message_to_memory(message: OpenAIChatMessage) -> OpenAIChatMessage:
    memory["history"].append(message)
    return message


def update_delta_on_memory(delta: OpenAIChatDelta) -> OpenAIChatDelta:
    # A delta with a new role starts a new message in the history; deltas with
    # the same (or no) role are concatenated onto the last message's content
    if memory["history"][-1].role != delta.role and delta.role is not None:
        memory["history"].append(
            OpenAIChatMessage(role=delta.role, content=delta.content, name=delta.name)
        )
    else:
        memory["history"][-1].content += delta.content
    return delta


def get_current_weather(
    location: str, format: Literal["celsius", "fahrenheit"] = "celsius"
) -> OpenAIChatDelta:
    # Stubbed weather lookup: in a real bot this would call a weather API
    result = {
        "location": location,
        "forecast": "sunny",
        "temperature": "25 C" if format == "celsius" else "77 F",
    }

    return OpenAIChatDelta(
        role="function", name="get_current_weather", content=json.dumps(result)
    )


# Stream Definitions

weather_stream = (
    debug(
        OpenAIChatStream[str, OpenAIChatDelta](
            "WeatherStream",
            lambda user_input: [
                *memory["history"],
                save_message_to_memory(
                    OpenAIChatMessage(role="user", content=user_input),
                ),
            ],
            model="gpt-3.5-turbo-0613",
            functions=[
                {
                    "name": "get_current_weather",
                    "description": "Gets the current weather in a given location, use this function for any questions related to the weather",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "location": {
                                "description": "The city to get the weather for, e.g. San Francisco. Guess the location from the user's messages",
                                "type": "string",
                            },
                            "format": {
                                "description": "The temperature unit to use, either celsius or fahrenheit",
                                "type": "string",
                                "enum": ["celsius", "fahrenheit"],
                            },
                        },
                        "required": ["location"],
                    },
                }
            ],
            temperature=0,
        )
    )
    .map(
        # Call the function if the model produced a function call, parsing the JSON arguments
        lambda delta: get_current_weather(**json.loads(delta.content))
        if delta.role == "function" and delta.name == "get_current_weather"
        else delta
    )
    .map(update_delta_on_memory)
)
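When the model decides to call the function, the stream yields a delta with role "function" and the JSON arguments as its content; the .map above parses those arguments and swaps the delta for the actual function result. A sketch of that transformation (the arguments mirror the Amsterdam run shown later):

# Sketch: a function-call delta as the model would produce it...
call = OpenAIChatDelta(
    role="function", name="get_current_weather", content='{"location": "Amsterdam"}'
)
# ...becomes the function's result delta after the .map
result = get_current_weather(**json.loads(call.content))
# result.content == '{"location": "Amsterdam", "forecast": "sunny", "temperature": "25 C"}'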

function_reply_stream = debug(
    OpenAIChatStream[None, OpenAIChatDelta](
        "FunctionReplyStream",
        lambda _: memory["history"],
        model="gpt-3.5-turbo-0613",
        temperature=0,
    )
).map(update_delta_on_memory)

weather_bot: Stream[str, OpenAIChatDelta] = weather_stream.and_then(
    # Reply based on the function result if the last output was a function output
    lambda outputs: function_reply_stream(None)
    if list(outputs)[-1].role == "function"
    # Otherwise just re-yield the outputs
    else as_async_generator(*outputs)
)
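
To make the routing concrete: and_then hands the lambda all the deltas produced by weather_stream, so the role of the last one tells us whether the model called the function. A small illustration with made-up values:

# Made-up outputs: a plain chat answer ends with an "assistant" delta
outputs = [OpenAIChatDelta(role="assistant", content="Hello!")]
if list(outputs)[-1].role == "function":
    stream = function_reply_stream(None)  # ask the LLM to phrase the function result
else:
    stream = as_async_generator(*outputs)  # re-yield the deltas unchanged
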
from langstream.utils.stream import collect_final_output

_ = await collect_final_output(weather_bot("hi there"))
    

> WeatherStream

Assistant: Hello! How can I assist you today?
_ = await collect_final_output(weather_bot("is it hot today in Amsterdam?"))
    

> WeatherStream

Function get_current_weather: {
  "location": "Amsterdam"
}

> FunctionReplyStream

Assistant: Yes, it is hot today in Amsterdam. The current temperature is 25°C and it is sunny.

The bot works well: it handles chit-chat messages, calls the weather function when needed, and replies to the user in natural language.

Let's inspect what's inside the bot's memory:

memory['history']
[OpenAIChatMessage(role='user', content='hi there', name=None),
 OpenAIChatMessage(role='assistant', content='Hello! How can I assist you today?', name=None),
 OpenAIChatMessage(role='user', content='is it hot today in Amsterdam?', name=None),
 OpenAIChatMessage(role='function', content='{"location": "Amsterdam", "forecast": "sunny", "temperature": "25 C"}', name='get_current_weather'),
 OpenAIChatMessage(role='assistant', content='Yes, it is hot today in Amsterdam. The current temperature is 25°C and it is sunny.', name=None)]

It saved both the conversation and the result of the function call; this way, continued conversations can draw on the previous context, including the previous function result.
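For example, a hypothetical follow-up like the one below (output omitted) lets the model resolve "there" from the saved history:

# Hypothetical follow-up: "there" can be resolved to Amsterdam via memory
_ = await collect_final_output(weather_bot("and what is the forecast there?"))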

That's it! If you have any questions about this example, join our Discord community and we can help you out.

Also, if you are interested in running a bot like this inside a nice UI, check out our docs on Chainlit.