|
|
|
|
|
import chainlit as cl |
|
|
from agent import make_graph |
|
|
|
|
|
from langchain_core.messages import AIMessageChunk, ToolMessage |
|
|
import uuid |
|
|
|
|
|
@cl.on_chat_start
async def on_chat_start():
    """Initialize a fresh chat session.

    Generates a unique LangGraph checkpoint thread id, stores it in the
    Chainlit user session for later retrieval in ``on_message``, and sends
    an opening greeting to the user.
    """
    # One thread id per browser session keeps each conversation's
    # checkpointed state isolated from other users.
    session_thread = f"thread-{uuid.uuid4()}"
    cl.user_session.set("thread_id", session_thread)

    greeting = cl.Message(content="Hello! You can start chatting with the AI.")
    await greeting.send()
|
|
|
|
|
@cl.on_message
async def on_message(message: cl.Message):
    """Run the agent on an incoming user message and stream the reply.

    Opens the LangGraph agent as an async context manager, streams
    message-level events, forwards AI token chunks into a single Chainlit
    message, and surfaces tool outputs inside a collapsible Step.

    Parameters:
        message: The incoming Chainlit user message.
    """
    # Route this turn through the per-session checkpoint thread created
    # in on_chat_start. (The old dead read of user_session["agent"] was
    # removed: that key is never set, and the local `async with` below
    # shadowed it anyway.)
    thread_id = cl.user_session.get("thread_id")
    config = {"configurable": {"thread_id": thread_id}}

    # Empty message we progressively fill with streamed tokens.
    msg = cl.Message(content="")

    try:
        async with make_graph() as agent:
            # Pass an explicit (role, content) tuple in a list instead of a
            # bare string, so we don't rely on the reducer's implicit
            # coercion of non-list inputs.
            async for stream, metadata in agent.astream(
                {"messages": [("user", message.content)]},
                config=config,
                stream_mode="messages",
            ):
                if isinstance(stream, AIMessageChunk) and stream.content:
                    # Incremental model output: append to the visible message.
                    await msg.stream_token(stream.content)
                elif isinstance(stream, ToolMessage) and stream.content:
                    # Tool results render in their own step in the UI.
                    async with cl.Step(type="tool") as step:
                        step.output = stream.content

            # Finalize the streamed message once the agent run completes.
            await msg.send()
    except Exception as e:
        # Top-level UI boundary: report the failure to the user rather
        # than letting the handler crash silently.
        await cl.Message(content=f"Error during agent invocation: {e}").send()
|
|
|
|
|
@cl.on_stop
async def on_stop():
    """Best-effort cleanup when the user interrupts generation.

    Bug fix: nothing in this file ever stores an "agent" key in the user
    session (on_message opens the graph with a local ``async with``, which
    already handles its own teardown), so ``get("agent")`` returns None and
    the unconditional ``__aexit__`` call raised AttributeError on every
    stop. Guard against None before attempting teardown.
    """
    agent = cl.user_session.get("agent")
    if agent is not None:
        # Close the agent's async context manually if one was ever stored.
        await agent.__aexit__(None, None, None)