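"""A LangGraph ReAct agent that streams its responses.

Combines a Blaxel-hosted search tool with a local `weather` tool and yields
text chunks (plus tool-call notices) as an async generator.
"""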
from typing import AsyncGenerator

from blaxel.langgraph import bl_model, bl_tools
from langchain.tools import tool
from langchain_core.messages import AIMessageChunk
from langgraph.prebuilt import create_react_agent


@tool
def weather(city: str) -> str:
    """Get the weather in a given city"""
    return f"The weather in {city} is sunny"


async def agent(input: str) -> AsyncGenerator[str, None]:
    prompt = (
        "You are a helpful assistant that can answer questions and help with tasks."
    )
    # Load the hosted "blaxel-search" tool from Blaxel and add the local weather tool
    tools = await bl_tools(["blaxel-search"]) + [weather]
    # Load the model configured on Blaxel under the name "sandbox-openai"
    model = await bl_model("sandbox-openai")
    agent = create_react_agent(model=model, tools=tools, prompt=prompt)
    messages = {"messages": [("user", input)]}
    async for chunk in agent.astream(messages, stream_mode=["updates", "messages"]):
        type_, stream_chunk = chunk
        # Stream the assistant's response tokens, filtering out messages produced by tools
        if (
            type_ == "messages"
            and len(stream_chunk) > 0
            and isinstance(stream_chunk[0], AIMessageChunk)
        ):
            msg = stream_chunk[0]
            # Yield only assistant text, skipping chunks that carry tool calls
            if msg.content and not msg.tool_calls:
                yield msg.content
        # Surface tool calls, useful if you want to show them in your interface
        if type_ == "updates" and "tools" in stream_chunk:
            for msg in stream_chunk["tools"]["messages"]:
                yield f"Tool call: {msg.name}\n"