elismasilva committed
Commit c11b8eb · 1 Parent(s): d10faed

Added Gemini API key requirement.
Files changed (2):
  1. app.py +23 -14
  2. services/agent_chat.py +4 -3
app.py CHANGED

@@ -28,9 +28,10 @@ BLAXEL_ICON_URL = "https://huggingface.co/datasets/DEVAIEXP/assets/resolve/main/
 MCP_SERVER_URL = os.getenv("MCP_SERVER_URL", "https://mcp-1st-birthday-gitrepo-inspector-mcp.hf.space/")
 AGENT_API_URL = os.getenv("AGENT_API_URL", "https://run.blaxel.ai/devaiexp/agents/agent")
 BLAXEL_API_KEY = os.getenv("BLAXEL_API_KEY")
+GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
 
 # LOGIC LAYER
-def _validate_api_keys(llm_api_key, request):
+def _validate_api_keys(llm_api_key, request, type="LLM"):
     USE_SERVER_KEYS = os.getenv("USE_SERVER_KEYS", "false").lower() in (
         "True",
         "true",
@@ -39,7 +40,7 @@ def _validate_api_keys(llm_api_key, request):
     )
     if not USE_SERVER_KEYS and request and request.headers.get("referer"):
         if not llm_api_key or not llm_api_key.strip():
-            raise gr.Error("⚠️ LLM API Key Required! Please enter your own API Key to use this tool in the demo UI.")
+            raise gr.Error(f"⚠️ {type} API Key Required! Please enter your own API Key to use this tool in the demo UI.")
 
 def _get_custom_header():
     html = """
@@ -517,8 +518,8 @@ def generate_priority_report(repo_url, provider, model, api_key, request: gr.Req
     Generates priority report using Sidebar configs.
     Handles tuple return (html, thought) from MCP.
     """
+    _validate_api_keys(api_key, request)
     gr.Info(f"🧠 Generating Strategy Report for {repo_url}... Please wait.")
-    llm_api_key = _validate_api_keys(api_key, request)
 
     try:
         client = Client(MCP_SERVER_URL)
@@ -526,7 +527,7 @@ def generate_priority_report(repo_url, provider, model, api_key, request: gr.Req
             repo_url,
             provider,
             model,
-            llm_api_key,
+            api_key,
             api_name="/prioritize_open_issues"
         )
 
@@ -708,7 +709,12 @@ with gr.Blocks(title="GitRepo Inspector", theme=theme, css=css_code, head=APP_HE
             placeholder="your API Key...",
             info="Required for approval and reanalysis actions"
         )
-
+        global_gemini_key = gr.Textbox(
+            label="Gemini Assistant API Key",
+            type="password",
+            placeholder="your API Key...",
+            info="Gemini API Key required for AI Assistant Chat"
+        )
         gr.Markdown("---")
         log_limit_slider = gr.Slider(
             minimum=10,
@@ -779,14 +785,16 @@ with gr.Blocks(title="GitRepo Inspector", theme=theme, css=css_code, head=APP_HE
             inputs=None,
             outputs=[chatbot]
         )
-        def init_agent():
-            return create_dashboard_agent()
+        def init_agent(gemini_api_key):
+            return create_dashboard_agent(gemini_api_key)
 
 
-        def interact(agent, prompt, history, current_repo, provider, model, token):
+        def interact(agent, prompt, history, current_repo, provider, model, token, gemini_api_key, request: gr.Request=None):
             agent_state = agent
             if agent_state is None:
-                agent_state = init_agent()
+                _validate_api_keys(gemini_api_key, request, "Gemini Assistant")
+                gr.Info("⚡ Please wait, initializing the agent for the first time...")
+                agent_state = init_agent(gemini_api_key)
             if agent_state is None:
                 history.append(gr.ChatMessage(role="user", content=prompt, metadata={"status": "done"}))
                 history.append(gr.ChatMessage(
@@ -864,7 +872,8 @@ with gr.Blocks(title="GitRepo Inspector", theme=theme, css=css_code, head=APP_HE
                 global_repo_url,
                 global_provider,
                 global_model,
-                global_github_token
+                global_github_token,
+                global_gemini_key
             ],
             outputs=[chatbot, msg, chat_trigger_issue, agent_state],
             show_progress="hidden"
@@ -1025,10 +1034,10 @@ with gr.Blocks(title="GitRepo Inspector", theme=theme, css=css_code, head=APP_HE
         ]
 
         # Added log_limit_slider to inputs
-        app.load(
-            init_agent,
-            outputs=[agent_state]
-        )
+        # app.load(
+        #     init_agent,
+        #     outputs=[agent_state]
+        # )
         app.load(
             fn=hard_refresh,
             inputs=[view_filter, current_sort, log_limit_slider, global_repo_url],
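
Taken together, the app.py hunks replace eager agent creation at app.load() with lazy, per-user initialization: the agent is now built on the first chat turn, after the Gemini key from the new sidebar textbox has been validated. The following is a minimal, self-contained sketch of that pattern under stated assumptions; the helper names validate_key and make_agent are stand-ins for illustration, not the real _validate_api_keys and create_dashboard_agent implementations.

import gradio as gr

def validate_key(key, label="Gemini Assistant"):
    # Mirrors the behavior of _validate_api_keys(): reject empty keys with a labeled error.
    if not key or not key.strip():
        raise gr.Error(f"⚠️ {label} API Key Required! Please enter your own API Key.")

def make_agent(gemini_api_key):
    # Stand-in factory; the real app calls create_dashboard_agent(gemini_api_key).
    return {"api_key": gemini_api_key}

def interact(agent_state, prompt, gemini_api_key):
    # Lazy initialization: build the agent on the first message, using the user's key.
    if agent_state is None:
        validate_key(gemini_api_key)
        gr.Info("⚡ Initializing the agent for the first time...")
        agent_state = make_agent(gemini_api_key)
    return agent_state, f"Agent ready. You said: {prompt}"

with gr.Blocks() as demo:
    agent_state = gr.State(None)
    gemini_key = gr.Textbox(label="Gemini Assistant API Key", type="password")
    msg = gr.Textbox(label="Message")
    reply = gr.Textbox(label="Reply")
    msg.submit(interact, inputs=[agent_state, msg, gemini_key], outputs=[agent_state, reply])

if __name__ == "__main__":
    demo.launch()

Because the key now comes from the user rather than the server, the agent cannot be constructed at load time, which is why the old app.load(init_agent, ...) call is commented out above.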
services/agent_chat.py CHANGED

@@ -3,7 +3,7 @@ import os
 import re
 import json
 from typing import Dict, List, Any, Optional
-from smolagents import CodeAgent, LiteLLMModel, tool, ToolCallingAgent
+from smolagents import LiteLLMModel, tool, ToolCallingAgent
 from smolagents.mcp_client import MCPClient as SmolMCPClient
 from gradio_client import Client as GradioClient
 from config.constants import AVAILABLE_MODELS_BY_PROVIDER
@@ -176,13 +176,14 @@ def trigger_live_analysis(
 # AGENT FACTORY
 # ===================================================================
 
-def create_dashboard_agent():
+def create_dashboard_agent(gemini_api_key):
     print("Initializing GitRepo Inspector Agent...")
 
     model = LiteLLMModel(
         model_id="gemini/gemini-2.5-flash",
         temperature=0.1,
-        max_tokens=2048
+        max_tokens=2048,
+        api_key=gemini_api_key
     )
 
     # Loads the clean prompt from YAML config
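
For completeness, a short usage sketch of the updated factory signature, assuming the caller supplies the Gemini key explicitly. Reading it from the GOOGLE_API_KEY environment variable here is only an illustration, since this diff does not show where that variable is consumed.

import os
from services.agent_chat import create_dashboard_agent  # module changed in this commit

# Hypothetical local fallback: read the key from GOOGLE_API_KEY (introduced in app.py above);
# the diff does not show this variable being consumed, so treat it as an illustration only.
gemini_key = os.getenv("GOOGLE_API_KEY")

if gemini_key:
    # Internally builds LiteLLMModel(model_id="gemini/gemini-2.5-flash",
    # temperature=0.1, max_tokens=2048, api_key=gemini_key).
    agent = create_dashboard_agent(gemini_key)
else:
    agent = None  # app.py now defers creation until the user pastes a key in the sidebar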