# Source: Hugging Face file viewer (author: alexliap), commit 89bcc88 "add observability", 2.34 kB
import gradio as gr
from agent import query_agent
# Simulate your async agent interaction
async def interact_with_agent(prompt: str) -> tuple[str, str]:
    """Run the research agent and separate its report from its references.

    Parameters
    ----------
    prompt : str
        The research question forwarded to ``query_agent``.

    Returns
    -------
    tuple[str, str]
        ``(report_body, references)``. ``references`` is ``""`` when the
        agent output contains no ``## References:`` section — the original
        double ``split(...)[1]`` raised ``IndexError`` in that case.
    """
    final_report = await query_agent(research_query=prompt)
    # partition() scans once and never raises; everything after the first
    # separator is treated as the references block.
    body, _sep, refs = final_report.partition("\n\n## References:\n\n")
    return body, refs
# Display URLs in Markdown
def format_urls(urls):
    """Return *urls* for the sidebar Markdown widget, or a placeholder.

    Any falsy value (empty string/list, None) yields the placeholder text;
    everything else is passed through unchanged.
    """
    return urls if urls else "(No sources yet)"
def start_new_query(message):
    """Reset the UI state for a fresh research query.

    Keeps the submitted message (first output), clears the chat history
    (second output), and blanks the reference sidebar (third output).
    """
    fresh_history: list = []
    blank_refs = ""
    return message, fresh_history, blank_refs
if __name__ == "__main__":
    # Build the two-column Gradio UI: chat on the left, reference links on
    # the right. fill_height lets the chat column stretch vertically.
    with gr.Blocks(title="Deep Research Agent", fill_height=True) as chat_app:
        gr.Markdown("## 🧠 Deep Research Agent")
        with gr.Row():
            # Main chat area
            with gr.Column(scale=4):
                chatbot = gr.Chatbot(type="messages", label="Chat", height=550)
                msg = gr.Textbox(placeholder="Ask a research question...")
                # Clears both the textbox and the chat transcript.
                clear = gr.ClearButton([msg, chatbot])
            # Sidebar for URLs
            with gr.Column(scale=1):
                gr.Markdown("### πŸ”— References")
                url_display = gr.Markdown("(No sources yet)")

        # Step 1: show user message immediately
        # NOTE(review): dead code — the only chain that referenced this
        # handler is commented out below; kept for reference.
        def show_user_msg(message, chat_history: list):
            chat_history.append({"role": "user", "content": message})
            return chat_history

        # Step 2: respond asynchronously (simulate the agent)
        # Returns "" to clear the textbox, the extended transcript, and the
        # formatted reference list for the sidebar.
        async def respond(message: str, chat_history: list):
            report, urls = await interact_with_agent(message)
            chat_history.append({"role": "assistant", "content": report})
            return "", chat_history, format_urls(urls)

        # Chain both: show user β†’ respond with URLs
        # msg.submit(show_user_msg, [msg, chatbot], [chatbot], queue=False).then(
        # respond, [msg, chatbot], [msg, chatbot, url_display]
        # )
        # Active chain: reset chat + sidebar, then display the user turn,
        # then run the agent and fill in the assistant turn + references.
        msg.submit(
            start_new_query, [msg], [msg, chatbot, url_display], queue=False
        ).then(
            # Replaces (not appends to) the chat history with the user turn;
            # safe only because start_new_query just reset it to [].
            lambda message, chat_history: [
                {"role": "user", "content": message}
            ],  # user message
            [msg, chatbot],
            [chatbot],
        ).then(respond, [msg, chatbot], [msg, chatbot, url_display])

    # Start the Gradio server (blocks until shutdown).
    chat_app.launch()