alexliap committed
Commit 89bcc88 · unverified · 1 Parent(s): c3a22a6

add observability

Files changed (4):
  1. .gitignore +0 -1
  2. .huggingface/space.yaml +0 -1
  3. agent.py +4 -0
  4. app.py +41 -35
.gitignore CHANGED
@@ -10,4 +10,3 @@ wheels/
 .venv
 *.lock
 .env
-.python_version
 
.huggingface/space.yaml DELETED
@@ -1 +0,0 @@
-sdk: docker
 
 
agent.py CHANGED
@@ -1,3 +1,4 @@
+import logfire
 from deep_research_agent.nodes import (
     BeginResearch,
     FinalReport,
@@ -8,6 +9,9 @@ from deep_research_agent.nodes import (
 from deep_research_agent.state import ResearchState
 from pydantic_graph import Graph
 
+logfire.configure()
+logfire.instrument_pydantic_ai()
+
 
 async def query_agent(research_query: str):
     state = ResearchState()
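
The agent.py change wires Pydantic Logfire into the pydantic-ai agent: logfire.configure() initialises the SDK when the module is imported, and logfire.instrument_pydantic_ai() records agent and model calls as traces. A minimal sketch of an equivalent setup is below; the send_to_logfire="if-token-present" option and the LOGFIRE_TOKEN secret are assumptions about how the Space might be configured, not part of this commit.

    # Sketch only: make tracing a no-op when no Logfire token is configured.
    # Assumes a LOGFIRE_TOKEN secret is set on the Space (not part of this commit).
    import logfire

    logfire.configure(send_to_logfire="if-token-present")
    logfire.instrument_pydantic_ai()  # trace pydantic-ai agent and model calls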
app.py CHANGED
@@ -1,18 +1,9 @@
 import gradio as gr
-import asyncio
 from agent import query_agent
 
 
 # Simulate your async agent interaction
 async def interact_with_agent(prompt: str):
-    # # This is where you'd call your real query_agent()
-    # await asyncio.sleep(1)
-    # urls = [
-    #     "https://arxiv.org/abs/1234.5678",
-    #     "https://openai.com/research",
-    #     "https://example.com/article",
-    # ]
-    # report = f"Report generated for: '{prompt}'"
     final_report = await query_agent(research_query=prompt)
     final_report, refs = (
         final_report.split("\n\n## References:\n\n")[0],
@@ -28,35 +19,50 @@ def format_urls(urls):
     return urls
 
 
-with gr.Blocks(title="Deep Research Agent") as chat_app:
-    gr.Markdown("## 🧠 Deep Research Agent")
-
-    with gr.Row():
-        # Main chat area
-        with gr.Column(scale=4):
-            chatbot = gr.Chatbot(type="messages", label="Chat", height=550)
-            msg = gr.Textbox(placeholder="Ask a research question...")
-            clear = gr.ClearButton([msg, chatbot])
-
-        # Sidebar for URLs
-        with gr.Column(scale=1):
-            gr.Markdown("### 🔗 References")
-            url_display = gr.Markdown("(No sources yet)")
-
-    # Step 1: show user message immediately
-    def show_user_msg(message, chat_history: list):
-        chat_history.append({"role": "user", "content": message})
-        return chat_history
-
-    # Step 2: respond asynchronously (simulate the agent)
-    async def respond(message: str, chat_history: list):
-        report, urls = await interact_with_agent(message)
-        chat_history.append({"role": "assistant", "content": report})
-        return "", chat_history, format_urls(urls)
-
-    # Chain both: show user → respond with URLs
-    msg.submit(show_user_msg, [msg, chatbot], [chatbot], queue=False).then(
-        respond, [msg, chatbot], [msg, chatbot, url_display]
-    )
-
-chat_app.launch()
+def start_new_query(message):
+    # Reset chat and sidebar
+    return message, [], ""
+
+
+if __name__ == "__main__":
+    with gr.Blocks(title="Deep Research Agent", fill_height=True) as chat_app:
+        gr.Markdown("## 🧠 Deep Research Agent")
+
+        with gr.Row():
+            # Main chat area
+            with gr.Column(scale=4):
+                chatbot = gr.Chatbot(type="messages", label="Chat", height=550)
+                msg = gr.Textbox(placeholder="Ask a research question...")
+                clear = gr.ClearButton([msg, chatbot])
+
+            # Sidebar for URLs
+            with gr.Column(scale=1):
+                gr.Markdown("### 🔗 References")
+                url_display = gr.Markdown("(No sources yet)")
+
+        # Step 1: show user message immediately
+        def show_user_msg(message, chat_history: list):
+            chat_history.append({"role": "user", "content": message})
+            return chat_history
+
+        # Step 2: respond asynchronously (simulate the agent)
+        async def respond(message: str, chat_history: list):
+            report, urls = await interact_with_agent(message)
+            chat_history.append({"role": "assistant", "content": report})
+            return "", chat_history, format_urls(urls)
+
+        # Chain both: show user → respond with URLs
+        # msg.submit(show_user_msg, [msg, chatbot], [chatbot], queue=False).then(
+        #     respond, [msg, chatbot], [msg, chatbot, url_display]
+        # )
+        msg.submit(
+            start_new_query, [msg], [msg, chatbot, url_display], queue=False
+        ).then(
+            lambda message, chat_history: [
                {"role": "user", "content": message}
            ],  # user message
            [msg, chatbot],
            [chatbot],
        ).then(respond, [msg, chatbot], [msg, chatbot, url_display])
+
+    chat_app.launch()
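
The new app.py wraps the UI in an if __name__ == "__main__" guard and replaces the single submit handler with a chained sequence: start_new_query clears the chat and sidebar, the lambda renders the user's message, and respond awaits the agent and fills in the report plus references. A minimal, self-contained sketch of this .submit(...).then(...) chaining pattern is below; the fake_agent coroutine is a stand-in for query_agent and is not part of the commit.

    # Sketch of Gradio's chained-event pattern; `fake_agent` is illustrative only.
    import gradio as gr


    async def fake_agent(message: str) -> str:
        # Placeholder for the real research agent.
        return f"Echo: {message}"


    with gr.Blocks() as demo:
        chatbot = gr.Chatbot(type="messages")
        box = gr.Textbox(placeholder="Ask something...")

        def show_user(message, history: list):
            # Step 1 runs first so the user's turn appears without waiting.
            history.append({"role": "user", "content": message})
            return history

        async def respond(message, history: list):
            # Step 2 runs after step 1 finishes and appends the assistant reply.
            reply = await fake_agent(message)
            history.append({"role": "assistant", "content": reply})
            return "", history

        # .then() runs each step in order, feeding it the listed components' values.
        box.submit(show_user, [box, chatbot], [chatbot], queue=False).then(
            respond, [box, chatbot], [box, chatbot]
        )

    if __name__ == "__main__":
        demo.launch()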