|
|
import os |
|
|
import re |
|
|
import time |
|
|
import gradio as gr |
|
|
import markdown |
|
|
import pandas as pd |
|
|
import requests |
|
|
import services.db as db |
|
|
import xml.etree.ElementTree as ET |
|
|
from dotenv import load_dotenv |
|
|
from gradio_client import Client |
|
|
from gradio_htmlplus import HTMLPlus |
|
|
from gradio_bottombar import BottomBar |
|
|
from gradio_buttonplus import ButtonPlus |
|
|
from services import charts |
|
|
from services.table_renderer import generate_issues_html |
|
|
from services.chat_utils import stream_to_gradio |
|
|
from services.agent_chat import create_dashboard_agent |
|
|
from config.constants import AVAILABLE_MODELS_BY_PROVIDER |
|
|
|
|
|
load_dotenv()  # read a local .env (if present) before any os.getenv below

# Repo URLs with a sync currently running; guards against duplicate submits.
ACTIVE_SYNCS = set()

# Default table ordering: most recently updated issues first.
DEFAULT_SORT = {"col": "updated_at", "asc": False}

# Repository preselected in the sidebar dropdown.
DEFAULT_REPO = "https://github.com/gradio-app/gradio"

# Logo used in the header tag and next to each agent log entry.
BLAXEL_ICON_URL = "https://huggingface.co/datasets/DEVAIEXP/assets/resolve/main/blaxel_logo.png"

# Service endpoints — overridable via environment variables.
MCP_SERVER_URL = os.getenv("MCP_SERVER_URL", "https://mcp-1st-birthday-gitrepo-inspector-mcp.hf.space/")
AGENT_API_URL = os.getenv("AGENT_API_URL", "https://run.blaxel.ai/devaiexp/agents/agent")

# Credentials (no defaults on purpose; may be None when not configured).
BLAXEL_API_KEY = os.getenv("BLAXEL_API_KEY")
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
|
|
|
|
|
|
|
|
def _validate_api_keys(llm_api_key, request, type="LLM"): |
|
|
USE_SERVER_KEYS = os.getenv("USE_SERVER_KEYS", "false").lower() in ( |
|
|
"True", |
|
|
"true", |
|
|
"1", |
|
|
"yes", |
|
|
) |
|
|
if not USE_SERVER_KEYS and request and request.headers.get("referer"): |
|
|
if not llm_api_key or not llm_api_key.strip(): |
|
|
raise gr.Error(f"⚠️ {type} API Key Required! Please enter your own API Key to use this tool in the demo UI.") |
|
|
|
|
|
def _get_custom_header():
    """Return the dashboard banner as an HTML string (scoped CSS + markup).

    The literal uses a plain "{BLAXEL_ICON_URL}" placeholder instead of an
    f-string on purpose: the embedded CSS is full of braces that would
    otherwise need escaping. The placeholder is substituted at the end.
    """
    html = """
    <style>
    .header-container {
        background: linear-gradient(314deg, #64748b 0%, #373f4a 100%);
        padding: 30px 20px;
        border-radius: 16px;
        color: white !important;
        text-align: center;
        box-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.1), 0 4px 6px -2px rgba(0, 0, 0, 0.05);
        margin-bottom: 25px;
        font-family: 'Inter', -apple-system, sans-serif;
    }

    .header-content {
        max-width: 800px;
        margin: 0 auto;
    }

    .header-title {
        color: white !important;
        font-size: 2.5rem;
        font-weight: 800;
        margin: 0;
        display: flex;
        align-items: center;
        justify-content: center;
        gap: 15px;
        letter-spacing: -0.02em;
        text-shadow: 0 2px 4px rgba(0,0,0,0.1);
    }

    .header-icon {
        font-size: 3rem;
        filter: drop-shadow(0 2px 4px rgba(0,0,0,0.1));
    }

    .header-subtitle {
        color: var(--neutral-200) !important;
        font-size: 1.1rem;
        font-weight: 400;
        margin-top: 8px;
        opacity: 0.9;
        letter-spacing: 0.01em;
    }

    .header-footer {
        color: var(--neutral-200) !important;
        margin-top: 25px;
        padding-top: 15px;
        border-top: 1px solid rgba(255, 255, 255, 0.2);
        font-size: 0.85rem;
        font-weight: 500;
        opacity: 0.85;
        display: flex;
        justify-content: center;
        flex-wrap: wrap;
        gap: 15px;
    }

    .header-tag {
        color: var(--neutral-200) !important;
        display: flex;
        align-items: center;
        gap: 6px;
    }

    .separator {
        color: var(--neutral-200) !important;
        opacity: 0.4;
    }
    .header-img-icon {
        width: 18px;
        height: 18px;
        vertical-align: text-bottom;
        opacity: 0.9;
    }
    </style>

    <div class="header-container">
        <div class="header-content">
            <div class="header-title">
                <span class="header-icon">🕵️‍♂️</span> GitRepo Inspector
            </div>
            <div class="header-subtitle">
                An Autonomous Ecosystem for GitHub Issue Management
            </div>

            <div class="header-footer">
                <span class="header-tag">Powered by Gradio 🚀</span>
                <span class="separator">|</span>
                <span class="header-tag">
                    Blaxel Agents
                    <img src="{BLAXEL_ICON_URL}" class="header-img-icon" alt="icon">
                </span>
                <span class="separator">|</span>
                <span class="header-tag">MCP Protocol 🔌</span>
                <span class="separator">|</span>
                <span class="header-tag">Gemini • SambaNova • OpenAI • Nebius</span>
            </div>
        </div>
    </div>
    """
    # Substitute the icon URL placeholder (see docstring for why not f-string).
    return html.replace("{BLAXEL_ICON_URL}", BLAXEL_ICON_URL)
|
|
|
|
|
def _format_logs_to_html(logs_df):
    """
    Converts logs to HTML list with Icons and Client-side Timezone conversion.

    Each entry carries the ISO timestamp in a data-timestamp attribute; the
    front-end helper window.convert_timestamps rewrites it to local time.

    Args:
        logs_df: DataFrame with columns 'created_at', 'event_type',
            'issue_number', 'message' (as produced by db.fetch_agent_logs).

    Returns:
        HTML string (or a placeholder div when there are no log rows).
    """
    if logs_df.empty:
        return "<div style='color: #666; padding: 10px;'>Waiting for agent activity...</div>"

    # Collect fragments and join once at the end — avoids quadratic `+=`
    # string concatenation when many log lines are rendered.
    parts = ["""
    <style>
    .log-entry {
        display: flex;
        align-items: center;
        padding: 4px 0;
        border-bottom: 1px solid rgba(128,128,128, 0.1);
        font-family: 'Consolas', monospace;
        font-size: 12px;
        color: var(--body-text-color);
    }
    .log-icon-img {
        width: 20px;
        height: 20px;
        margin-right: 8px;
        border-radius: 4px;
    }
    .log-time {
        color: #6B7280;
        margin-right: 10px;
        min-width: 70px;
        font-weight: bold;
    }
    .log-type { margin-right: 8px; }
    </style>
    <div id="log-container">
    """]

    for _, row in logs_df.iterrows():
        iso_time = row['created_at'].isoformat()

        # Pick an emoji from the event type keyword (first match wins).
        etype = str(row['event_type']).lower()
        icon_char = "🧠" if 'thought' in etype else "🔌" if 'tool' in etype else "❌" if 'error' in etype else "✅" if 'success' in etype else "ℹ️"

        # Only show an issue badge for rows actually tied to an issue
        # (issue_number may be NaN or the string "None").
        issue_tag = f"<span style='color: #3B82F6;'>[#{int(row['issue_number'])}]</span> " if pd.notna(row['issue_number']) and row['issue_number'] != "None" else ""

        parts.append(f"""
        <div class="log-entry">
            <img src="{BLAXEL_ICON_URL}" width=32 class="log-icon-img" alt="icon">
            <!-- JS will fill this span with local time -->
            <span class="log-time local-time" data-timestamp="{iso_time}">--:--:--</span>
            <span class="log-type">{icon_char}</span>
            <span>{issue_tag}{row['message']}</span>
        </div>
        """)

    parts.append("</div>")
    return "".join(parts)
|
|
|
|
|
def parse_response_with_reasoning(text: str):
    """Split an agent reply into the visible answer and optional reasoning.

    The agent may emit `<response>...</response><reasoning>...</reasoning>`;
    anything unparsable is treated as a plain-text response. Trailing
    `[VIEW:#n]` navigation tags are stripped from the visible answer.

    Returns:
        (clean_response, accordion_html) — accordion_html is a <details>
        block wrapping the reasoning, or None when there is no reasoning.
    """
    try:
        # Wrap in a synthetic root so sibling tags form a valid document.
        root = ET.fromstring(f"<root>{text.strip()}</root>")
        response = root.findtext("response") or text.strip()
        reasoning = root.findtext("reasoning") or ""
    except Exception:  # fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt
        response = text.strip()
        reasoning = ""

    # Remove internal navigation markers before showing the text to the user.
    clean_response = re.sub(r"\[VIEW:#\d+\]", "", response).strip()

    if reasoning.strip():
        accordion_html = f"""
        <details style="margin: 12px 0; padding: 12px; background: var(--block-background-fill); border: 1px solid var(--border-color-accent); border-radius: 8px; cursor: pointer;">
            <summary style="font-weight: 600; color: var(--primary-500); list-style: none; margin: -12px -12px 8px -12px; padding: 12px; background: var(--block-background-fill); border-radius: 8px 8px 0 0;">
                Raciocínio do agente (clique para expandir)
            </summary>
            <div style="font-size: 13px; line-height: 1.6; color: var(--neutral-600); padding: 0 4px;">
                {reasoning.strip().replace(chr(10), '<br>')}
            </div>
        </details>
        """
        return clean_response, accordion_html
    else:
        return clean_response, None
|
|
|
|
|
def fetch_fresh_data_to_cache(log_limit=50, repo_url=None):
    """
    Fetches all necessary data from DB in one pass.

    Returns:
        (donut_fig, funnel_fig, timeline_fig, full_df, logs_html) — note the
        5-tuple; callers must unpack all five values.
    """
    # Read everything from the database first...
    verdict_stats = db.fetch_dashboard_stats(repo_url)
    open_count = db.get_total_open_issues_count(repo_url)
    recent_logs = db.fetch_agent_logs(limit=log_limit)
    issues_df = db.fetch_issues_dataframe("all", repo_url)

    # ...then derive every dashboard artifact from those reads.
    return (
        charts.create_verdict_donut(verdict_stats),
        charts.create_efficiency_funnel(verdict_stats, open_count),
        charts.create_timeline_chart(issues_df),
        issues_df,
        _format_logs_to_html(recent_logs),
    )
|
|
|
|
|
def render_from_cache(df_cache, view_filter, sort_config, page=1):
    """Filter, sort and paginate the cached issues DataFrame into HTML.

    Args:
        df_cache: full issues DataFrame (or None).
        view_filter: "Action Required" shows only pending approvals with a
            proposed action; anything else shows every issue.
        sort_config: {"col": ..., "asc": ...} or falsy for no sorting.
        page: 1-based page number; clamped into the valid range.

    Returns:
        (table_html, page_label_markdown, clamped_page)
    """
    PAGE_SIZE = 50

    if df_cache is None or df_cache.empty:
        return generate_issues_html(pd.DataFrame()), "Page 1 / 1", 1

    if "Action" in view_filter:
        # "Action Required": pending approval AND an actual proposal payload.
        df = df_cache[
            (df_cache['status'] == 'pending_approval') &
            (df_cache['proposed_action'].notnull())
        ]
    else:
        df = df_cache

    if sort_config and sort_config['col'] in df.columns:
        df = df.sort_values(
            by=sort_config['col'],
            ascending=sort_config['asc'],
            na_position='last'
        )

    total_rows = len(df)
    # Fix: ceiling division. The previous `total_rows // 50 + 1` reported an
    # extra, empty page whenever the row count was an exact multiple of 50.
    total_pages = max(1, -(-total_rows // PAGE_SIZE))

    # Clamp the requested page into [1, total_pages].
    page = max(1, min(page, total_pages))

    start = (page - 1) * PAGE_SIZE
    df_page = df.iloc[start:start + PAGE_SIZE]

    html = generate_issues_html(df_page, sort_config.get('col'), sort_config.get('asc'))
    page_label = f"Page **{page}** of {total_pages} ({total_rows} items)"

    return html, page_label, page
|
|
|
|
|
def refresh_dashboard_data(view_filter, sort_config=None, log_limit=50, repo_url=None):
    """Refresh charts, table and logs; return the 7 values the UI expects.

    Returns:
        (donut_fig, timeline_fig, html_table, logs_text, full_df,
         page_label, page) — the funnel figure is fetched but not returned,
        matching this function's historical output arity.
    """
    if sort_config is None:
        sort_config = DEFAULT_SORT

    # Fix: fetch_fresh_data_to_cache returns FIVE values (incl. funnel_fig);
    # the previous 4-name unpack raised ValueError on every call.
    donut_fig, funnel_fig, timeline_fig, full_df, logs_text = fetch_fresh_data_to_cache(log_limit, repo_url)

    html_table, page_label, page = render_from_cache(full_df, view_filter, sort_config, 1)

    return donut_fig, timeline_fig, html_table, logs_text, full_df, page_label, page
|
|
|
|
|
def load_issue_details(issue_number, repo_url):
    """
    Loads details for an issue.
    Strategy: Check Agent Cache -> Fallback to DB.

    Returns a 9-tuple aligned with the first nine entries of
    `trigger_outputs`: (detail_html, comment_field_update, exec_status_text,
    auth_box_visibility, reanalyze_btn_update, summary_markdown,
    thought_markdown, analysis_accordion_update, thought_accordion_update).
    """
    # No selection: skip every output so the UI panels stay untouched.
    if not issue_number or issue_number == 0:
        return gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip()

    # Defaults; all of these are overwritten by the DB fetch below.
    body = None
    action_json = None
    status = "pending_approval"
    thought_content = ""
    wrapped_html = ""

    body, action_json, status, thought_process = db.fetch_issue_details_by_id(issue_number, repo_url)
    raw_body = body if body else "No content."

    # Agent-produced reports are already HTML ("Analyzed by" marker); strip
    # any stray markdown code fences. Otherwise render the markdown body.
    if "<div" in raw_body and "Analyzed by" in raw_body:
        final_html = raw_body.replace("```html", "").replace("```", "")
    else:
        final_html = markdown.markdown(raw_body, extensions=['tables', 'fenced_code'])
        final_html = f"<h3 style='color:#4B5563; border-bottom:1px solid #eee; padding-bottom:5px;'>Analysis Report</h3>{final_html}"

    # Scrollable, theme-aware container around the report body.
    wrapped_html = f"""
    <div style="
        font-family: -apple-system, system-ui, sans-serif;
        line-height: 1.6;
        color: var(--body-text-color);
        padding: 20px;
        background: var(--background-fill-secondary);
        border-radius: 8px;
        border: 1px solid var(--border-color-primary);
        max-height: 600px;
        overflow-y: auto;
    ">
        {final_html}
    </div>
    """

    def get_summary_text(st, act):
        # One-line status banner for the Action Console.
        if st == 'pending_approval':
            return f"### ⚠️ Action Required\n**Issue #{issue_number}** is marked as **{act.get('reason', 'resolved') if act else 'needs review'}**."
        elif st == 'executed':
            return f"### ✅ Action Executed\n**Issue #{issue_number}** was closed."
        else:
            return f"### ℹ️ Status: {st}"

    has_thought = thought_process is not None and len(thought_process.strip()) > 0
    thought_content = thought_process if has_thought else "No thought process available."

    summary_text = get_summary_text(status, action_json)

    # Issue exists but was never analyzed: show a placeholder and expose the
    # "Analyze Now" button instead of the proposal controls.
    if not body:
        placeholder_html = f"""
        <div style="
            font-family: -apple-system, system-ui, sans-serif;
            color: #64748B;
            padding: 40px;
            text-align: center;
            background: var(--background-fill-secondary);
            border-radius: 8px;
            border: 1px dashed var(--border-color-primary);
        ">
            <h3>Waiting for Analysis ⏳</h3>
            <p>This issue has not been processed by the Agent yet.</p>
            <p>Click <b>'Analyze Now'</b> to start manual analysis.</p>
        </div>
        """

        return (
            placeholder_html,
            gr.update(value="", interactive=False, label="No Proposal"),
            "",
            gr.update(visible=False),
            gr.update(visible=True, interactive=True, value="▶️ Analyze Now"),
            summary_text,
            "",
            gr.update(visible=True, open=True),
            gr.update(visible=False, open=False)
        )

    is_executed = (status == 'executed')
    is_pending = (status == 'pending_approval')

    comment_val = action_json.get('comment', '') if action_json else ""

    # The comment stays editable only while approval is still pending.
    comment_update = gr.update(
        value=comment_val,
        interactive=is_pending,
        label="Executed Comment" if is_executed else "Proposed Comment"
    )

    # Approve button is only offered for pending proposals.
    auth_vis = gr.update(visible=is_pending)

    # Re-analysis makes no sense once the action has been executed.
    reanalyze_vis = gr.update(visible=not is_executed, interactive=not is_executed, value="🔄 Re-Analyze Issue")

    return wrapped_html, comment_update, "", auth_vis, reanalyze_vis, summary_text, thought_content, gr.update(visible=True, open=True), gr.update(visible=has_thought, open=False)
|
|
|
|
|
def trigger_sync_action(repo_url, user_token):
    """
    Calls the MCP sync tool for the selected repository.

    Generator: streams progress strings to the sync status Markdown. Because
    the function body contains `yield`, a plain `return error` would silently
    discard the message — every validation failure is therefore yielded
    before returning (this fixes errors never appearing in the UI).
    """
    if not repo_url:
        error = "Please select or type a repository URL."
        gr.Info(f"⚠️ {error}")
        yield error
        return

    clean_url = repo_url.strip().rstrip("/")

    if not clean_url.startswith("https://github.com/"):
        error = "Invalid URL. Must start with https://github.com/"
        gr.Info(f"⚠️ {error}")
        yield error
        return

    if clean_url in ACTIVE_SYNCS:
        # Fix: the f-prefix was missing, so the literal "{clean_url}" was shown.
        error = f"Sync already in progress for {clean_url}. Please wait..."
        gr.Info(f"⚠️ {error}")
        yield error
        return

    if not user_token:
        error = "GitHub Token Required!"
        gr.Info(f"⚠️ {error}")
        yield error
        return

    ACTIVE_SYNCS.add(clean_url)
    try:
        client = Client(MCP_SERVER_URL)

        job = client.submit(
            repo_url,
            user_token,
            api_name="/sync_repository"
        )

        # Stream each progress update from the MCP job to the UI.
        for update in job:
            yield f"{update}"

    except Exception as e:
        yield f"❌ Sync Failed: {str(e)}"
    finally:
        # discard() is a no-op if the URL was never added.
        ACTIVE_SYNCS.discard(clean_url)
|
|
|
|
|
def trigger_manual_reanalysis(issue_number, repo_url, provider, model, github_token, api_key, request: gr.Request = None):
    """
    Calls the Blaxel Agent to force a re-analysis of a single issue.

    The agent streams its progress over HTTP; we drain the stream (results
    are persisted server-side) and report completion or failure.

    Returns:
        Status string for the exec-status Markdown component.
    """
    if not issue_number:
        error = "Select an issue first."
        gr.Info(f"⚠️ {error}")
        return error

    _validate_api_keys(api_key, request)

    if not AGENT_API_URL:
        error = "Agent URL not configured."
        gr.Info(f"⚠️ {error}")
        return error

    payload = {
        "repo_url": repo_url,
        "provider": provider,
        "model": model,
        "specific_issue": int(issue_number),
        "github_token": github_token if github_token else None
    }
    gr.Info(f"⏳ Starting analysis for #{issue_number}. Please wait...")
    try:
        headers = {"Authorization": f"Bearer {BLAXEL_API_KEY}", "Content-Type": "application/json"}
        with requests.post(AGENT_API_URL, json=payload, headers=headers, stream=True) as resp:
            # Fix: previously a 4xx/5xx response still reported success
            # because the status code was never checked.
            resp.raise_for_status()
            # Drain the stream; the agent writes its results to the DB.
            for _ in resp.iter_lines():
                pass
        return f"✅ Analysis completed for #{issue_number}. Check the Trace Log tab for details."
    except Exception as e:
        return f"❌ Agent Trigger Failed: {e}"
|
|
|
|
|
def execute_approval_workflow(issue_number, repo_url, user_token, final_comment):
    """Approve the agent's proposed action: comment on (and possibly close)
    the GitHub issue via MCP, then mark the issue as executed in the DB.

    Returns a status string for the UI; re-raises gr.Error untouched.
    """
    if not user_token:
        error = "GitHub Token Required! Check Sidebar."
        gr.Info(f"⚠️ {error}")
        return error

    try:
        proposal = db.get_proposed_action_payload(issue_number, repo_url)

        # Close only when the stored proposal says so; with no proposal at
        # all we default to closing (manual action).
        should_close = proposal.get('action') == 'close' if proposal else True

        if not final_comment:
            final_comment = (
                proposal.get('comment', "Closing via GitRepo Inspector.")
                if proposal
                else "Closing via GitRepo Inspector (Manual Action)."
            )

        mcp_client = Client(MCP_SERVER_URL)
        result = mcp_client.predict(
            repo_url,
            int(issue_number),
            final_comment,
            should_close,
            user_token,
            api_name="/reply_and_close_issue"
        )

        # Persist the outcome so the dashboard reflects the executed state.
        db.update_issue_status(issue_number, repo_url, "executed", final_comment)

        return f"✅ Success: {result}"

    except gr.Error:
        raise
    except Exception as e:
        return f"❌ Error: {str(e)}"
|
|
|
|
|
def generate_priority_report(repo_url, provider, model, api_key, request: gr.Request = None):
    """
    Generates priority report using Sidebar configs.
    Handles tuple return (html, thought) from MCP.

    Generator: yields HTML chunks as the MCP job streams them.
    """
    _validate_api_keys(api_key, request)
    gr.Info(f"🧠 Generating Strategy Report for {repo_url}... Please wait.")

    try:
        client = Client(MCP_SERVER_URL)
        job = client.submit(
            repo_url,
            provider,
            model,
            api_key,
            api_name="/prioritize_open_issues"
        )

        for tuple_result in job:
            # The MCP tool may stream (html, thought) tuples; show only html.
            if isinstance(tuple_result, (list, tuple)) and len(tuple_result) > 0:
                html_content = tuple_result[0]
                yield html_content
            else:
                yield tuple_result

    except Exception as e:
        # Fix: `return <value>` inside a generator discards the value; the
        # error message must be yielded to reach the UI.
        yield f"❌ Error: {str(e)}"
|
|
|
|
|
def get_repo_choices():
    """Return the known repository URLs, ensuring DEFAULT_REPO is always
    offered (prepended when the DB does not already contain it)."""
    choices = db.fetch_distinct_repos()
    if DEFAULT_REPO not in choices:
        choices.insert(0, DEFAULT_REPO)
    return choices
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Optional project stylesheet; a missing ./style.css is fine — the app then
# ships with an empty custom-CSS string.
css_code = ""
try:
    with open("./style.css", "r", encoding="utf-8") as stylesheet:
        css_code = stylesheet.read() + "\n"
except FileNotFoundError:
    pass
|
|
|
|
|
|
|
|
# Injected into the page <head>: client-side helpers triggered from Python
# (timestamp localisation after refreshes) and from the Print button.
APP_HEAD = """
<script>
window.convert_timestamps = function() {
    console.log("🕒 Converting timestamps...");
    const spans = document.querySelectorAll('.local-time');
    spans.forEach(span => {
        const ts = span.getAttribute('data-timestamp');
        if (ts) {
            const date = new Date(ts);
            span.innerText = date.toLocaleTimeString([], { hour12: false });
        }
    });
}
window.print_report = function() {
    const report = document.getElementById('prio-report');
    if (!report) return;

    // Get inner HTML content (within shadow DOM or Gradio div)
    // Gradio 5 encapsulates, so we need to find real content
    const content = report.querySelector('.prose') || report;

    const printWindow = window.open('', '', 'height=600,width=800');
    printWindow.document.write('<html><head><title>Priority Report</title>');
    printWindow.document.write('<style>body{font-family:sans-serif; padding:20px;} table{width:100%; border-collapse:collapse;} th,td{border:1px solid #ddd; padding:8px;}</style>');
    printWindow.document.write('</head><body>');
    printWindow.document.write(content.innerHTML);
    printWindow.document.write('</body></html>');
    printWindow.document.close();
    printWindow.print();
}
</script>
"""
|
|
|
|
|
# Dashboard theme: blue/teal accents on a neutral base, with explicit
# light/dark values for every surface so the custom CSS can rely on them.
theme = gr.themes.Default(
    primary_hue='blue',
    secondary_hue='teal',
    neutral_hue='neutral'
).set(
    body_background_fill='*neutral_100',
    body_background_fill_dark='*neutral_900',
    body_text_color='*neutral_700',
    body_text_color_dark='*neutral_200',
    body_text_weight='400',
    link_text_color='*primary_500',
    link_text_color_dark='*primary_400',
    code_background_fill='*neutral_100',
    code_background_fill_dark='*neutral_800',
    shadow_drop='0 1px 3px rgba(0,0,0,0.1)',
    shadow_inset='inset 0 2px 4px rgba(0,0,0,0.05)',
    block_background_fill='*neutral_50',
    block_background_fill_dark='*neutral_700',
    block_border_color='*neutral_200',
    block_border_color_dark='*neutral_600',
    block_border_width='1px',
    block_border_width_dark='1px',
    block_label_background_fill='*primary_50',
    block_label_background_fill_dark='*primary_600',
    block_label_text_color='*primary_600',
    block_label_text_color_dark='*primary_50',
    panel_background_fill='white',
    panel_background_fill_dark='*neutral_800',
    panel_border_color='*neutral_200',
    panel_border_color_dark='*neutral_700',
    panel_border_width='1px',
    panel_border_width_dark='1px',
    input_background_fill='white',
    input_background_fill_dark='*neutral_800',
    input_border_color='*neutral_300',
    input_border_color_dark='*neutral_700',
    slider_color='*primary_500',
    slider_color_dark='*primary_400',
    button_primary_background_fill='*primary_600',
    button_primary_background_fill_dark='*primary_500',
    button_primary_background_fill_hover='*primary_700',
    button_primary_background_fill_hover_dark='*primary_400',
    button_primary_border_color='transparent',
    button_primary_border_color_dark='transparent',
    button_primary_text_color='white',
    button_primary_text_color_dark='white',
    button_secondary_background_fill='*neutral_200',
    button_secondary_background_fill_dark='*neutral_600',
    button_secondary_background_fill_hover='*neutral_300',
    button_secondary_background_fill_hover_dark='*neutral_500',
    button_secondary_border_color='transparent',
    button_secondary_border_color_dark='transparent',
    button_secondary_text_color='*neutral_700',
    button_secondary_text_color_dark='*neutral_200'
)
|
|
|
|
|
with gr.Blocks(title="GitRepo Inspector", theme=theme, css=css_code, head=APP_HEAD) as app: |
|
|
|
|
|
|
|
|
current_sort = gr.State(DEFAULT_SORT) |
|
|
issues_cache = gr.State(pd.DataFrame()) |
|
|
current_page = gr.State(1) |
|
|
sel_issue_state = gr.State() |
|
|
sel_repo_state = gr.State() |
|
|
agent_state = gr.State(value=None) |
|
|
|
|
|
with gr.Sidebar(position="left", open=True, label="Settings"): |
|
|
gr.Markdown("### ⚙️ Configuration") |
|
|
|
|
|
|
|
|
repo_choices = get_repo_choices() |
|
|
global_repo_url = gr.Dropdown( |
|
|
elem_classes="custom-dropdown", |
|
|
choices=repo_choices, |
|
|
value=DEFAULT_REPO, |
|
|
label="Repository", |
|
|
allow_custom_value=True, |
|
|
interactive=True, |
|
|
info="Select existing or type new URL" |
|
|
) |
|
|
with gr.Row(): |
|
|
sync_repo_btn = ButtonPlus("🔄 Sync This Repo", help="Update Repository Issues Cache. This may take a while.") |
|
|
sync_status = gr.Markdown(visible=True, value="Ready to sync.") |
|
|
|
|
|
global_github_token = gr.Textbox( |
|
|
label="GitHub Personal Token", |
|
|
type="password", |
|
|
placeholder="ghp_...", |
|
|
info="Required for 'Approve' and 'Prioritize'" |
|
|
) |
|
|
|
|
|
gr.Markdown("---") |
|
|
gr.Markdown("### 🧠 AI Model Configuration") |
|
|
gr.Markdown("(For Manual Actions)") |
|
|
|
|
|
default_provider = "gemini" |
|
|
global_provider = gr.Dropdown( |
|
|
choices=list(AVAILABLE_MODELS_BY_PROVIDER.keys()), |
|
|
value=default_provider, |
|
|
label="LLM Provider" |
|
|
) |
|
|
|
|
|
|
|
|
global_model = gr.Dropdown( |
|
|
choices=AVAILABLE_MODELS_BY_PROVIDER[default_provider], |
|
|
value=AVAILABLE_MODELS_BY_PROVIDER[default_provider][0], |
|
|
label="Model Name", |
|
|
allow_custom_value=True, |
|
|
interactive=True |
|
|
) |
|
|
|
|
|
global_llm_key = gr.Textbox( |
|
|
label="LLM API Key", |
|
|
type="password", |
|
|
placeholder="your API Key...", |
|
|
info="Required for approval and reanalysis actions" |
|
|
) |
|
|
global_gemini_key = gr.Textbox( |
|
|
label="Gemini Assistant API Key", |
|
|
type="password", |
|
|
placeholder="your API Key...", |
|
|
info="Gemini API Key required for AI Assistant Chat" |
|
|
) |
|
|
gr.Markdown("---") |
|
|
log_limit_slider = gr.Slider( |
|
|
minimum=10, |
|
|
maximum=200, |
|
|
value=50, |
|
|
step=10, |
|
|
label="Log Lines to Fetch" |
|
|
) |
|
|
|
|
|
gr.HTML(_get_custom_header()) |
|
|
|
|
|
|
|
|
with gr.Row(): |
|
|
with gr.Column(scale=1): |
|
|
pass |
|
|
with gr.Column(scale=0, min_width=150): |
|
|
refresh_global = gr.Button("🔄 Refresh Data", variant="secondary") |
|
|
|
|
|
with gr.Sidebar(position="right", open=False, width=400): |
|
|
with gr.Column(scale=1, elem_classes="column-container", min_width=400): |
|
|
gr.Markdown("### 🤖 AI Assistant") |
|
|
with gr.Column(scale=2): |
|
|
chatbot = gr.Chatbot( |
|
|
elem_classes="chatbot-container", |
|
|
type="messages", |
|
|
height=500, |
|
|
show_label=False, |
|
|
show_copy_button=True, |
|
|
avatar_images=( |
|
|
"https://huggingface.co/datasets/DEVAIEXP/assets/resolve/main/user.png", |
|
|
"https://huggingface.co/datasets/DEVAIEXP/assets/resolve/main/gemini-icon.png" |
|
|
) |
|
|
) |
|
|
with gr.Row(): |
|
|
msg = gr.Textbox(label="Message", max_lines=4, scale=4, placeholder="Ask: 'What's up with issue #123?'") |
|
|
send_btn = gr.Button("Send", variant="primary", scale=1) |
|
|
with gr.Row(): |
|
|
clear_chat_btn = gr.Button("🧹 Clear History") |
|
|
|
|
|
with gr.Row(max_height=300): |
|
|
gr.Examples(elem_id="chat-examples", |
|
|
examples=[ |
|
|
["Whats up with issue 12021?"], |
|
|
["Which repository are we analyzing right now?"], |
|
|
["List 3 open issues about 'chatbot'."], |
|
|
["Are there any issues marked as 'duplicate' by the AI?"], |
|
|
["Show me issues created by user 'abidlabs' (or any user you know exists)."], |
|
|
["Show me the full report for issue #12314"], |
|
|
["Please analyze issue #12264 again right now"], |
|
|
["Check issue #12044 using the 'openai' provider and model 'gpt-4o'"], |
|
|
["Analyze issue #12432 using 'gpt-oss-120b'"], |
|
|
["Is issue #11910 a duplicate? Check now."], |
|
|
["Find the most recent open issue about 'bug' and tell me if it's already resolved."], |
|
|
["Do we have any duplicates related to 'audio'? If yes, which is the original issue?"], |
|
|
["I'm seeing an error '422 Unprocessable Entity'. Is this a known issue in the repo?"], |
|
|
["What is the latest version of Gradio library?"], |
|
|
["This issue mentions 'Pydantic v2'. What is the migration guide for that?"], |
|
|
], |
|
|
inputs=[msg], |
|
|
label="Message Examples", |
|
|
) |
|
|
chat_trigger_issue = gr.Number(visible=False) |
|
|
def clear_chat():
    """Reset the assistant conversation to an empty history."""
    empty_history = []
    return empty_history
|
|
|
|
|
clear_chat_btn.click( |
|
|
fn=clear_chat, |
|
|
inputs=None, |
|
|
outputs=[chatbot] |
|
|
) |
|
|
def init_agent(gemini_api_key):
    # Build the dashboard chat agent with the user's Gemini key; called
    # lazily on the first chat message rather than at app startup.
    return create_dashboard_agent(gemini_api_key)
|
|
|
|
|
|
|
|
def interact(agent, prompt, history, current_repo, provider, model, token, gemini_api_key, request: gr.Request=None):
    """Chat handler: lazily builds the agent, streams its reply, and detects
    trailing [VIEW:#n] tags so the UI can open that issue's detail panel.

    Generator yielding (history, cleared_msg_box, detected_issue_or_skip,
    agent_state) — the agent instance is yielded back so gr.State keeps it.
    """
    agent_state = agent
    if agent_state is None:
        # First message: the agent has not been built yet.
        _validate_api_keys(gemini_api_key, request, "Gemini Assistant")
        gr.Info("⚡ Please wait, initializing the agent for the first time...")
        agent_state = init_agent(gemini_api_key)
        if agent_state is None:
            history.append(gr.ChatMessage(role="user", content=prompt, metadata={"status": "done"}))
            history.append(gr.ChatMessage(
                role="assistant",
                content="❌ Failed to initialize agent",
                metadata={"status": "done"}
            ))
            yield history, "", gr.skip(), agent_state
            return
        time.sleep(2)

    # Prepend the sidebar context so the agent knows the active repo/model.
    context_prompt = f"""
    User is looking at repository: '{current_repo}'
    Selected analysis provider: '{provider}'
    Selected model: '{model}'
    GitHub Token provided: {'Yes' if token else 'No'}

    User Query: {prompt}
    """
    history.append(gr.ChatMessage(role="user", content=prompt))
    yield history, "", gr.skip(), agent_state

    full_response = ""
    # Fix: was `detected_issue = gr.skip(), agent_state` — a stray tuple
    # assignment left over from an edit. None is the correct default.
    detected_issue = None

    if agent_state:
        try:
            for chunk in stream_to_gradio(agent_state, context_prompt):
                if isinstance(chunk, gr.ChatMessage):
                    history.append(chunk)
                elif isinstance(chunk, str):
                    # Append streamed text to the current assistant message.
                    if history and history[-1].role == "assistant":
                        history[-1].content += chunk
                    else:
                        history.append(gr.ChatMessage(role="assistant", content=chunk))

                content = chunk.content if isinstance(chunk, gr.ChatMessage) else chunk
                full_response += str(content)
                yield history, "", gr.skip(), agent_state

            # A trailing [VIEW:#n] tag asks the UI to open issue n's details.
            match = re.search(r"\[VIEW:#(\d+)\]\s*$", full_response, re.MULTILINE)
            detected_issue = int(match.group(1)) if match else None
            if match:
                # Strip the internal tag from the visible last message.
                last_msg = history[-1]
                if isinstance(last_msg, gr.ChatMessage):
                    clean_content = re.sub(r"\[VIEW:#\d+\]", "", last_msg.content).strip()
                    last_msg.content = clean_content
                elif isinstance(last_msg, dict):
                    last_msg['content'] = re.sub(r"\[VIEW:#\d+\]", "", last_msg['content']).strip()

            yield history, "", detected_issue, agent_state

        except Exception as e:
            history.append(gr.ChatMessage(role="assistant", content=f"Error: {e}"))
            yield history, "", gr.skip(), agent_state
    else:
        history.append(gr.ChatMessage(role="assistant", content="Agent failed to initialize."))
        yield history, "", gr.skip(), agent_state
|
|
|
|
|
submit_event = gr.on( |
|
|
triggers=[send_btn.click, msg.submit], |
|
|
fn=interact, |
|
|
inputs=[ |
|
|
agent_state, |
|
|
msg, |
|
|
chatbot, |
|
|
global_repo_url, |
|
|
global_provider, |
|
|
global_model, |
|
|
global_github_token, |
|
|
global_gemini_key |
|
|
], |
|
|
outputs=[chatbot, msg, chat_trigger_issue, agent_state], |
|
|
show_progress="hidden" |
|
|
) |
|
|
|
|
|
with gr.Row(): |
|
|
with gr.Column(scale=3, elem_classes="column-container"): |
|
|
|
|
|
with gr.Tabs(): |
|
|
with gr.TabItem("📋 Issues Overview"): |
|
|
with gr.Row(): |
|
|
|
|
|
with gr.Column(): |
|
|
view_filter = gr.Radio( |
|
|
["Action Required", "All Issues"], |
|
|
value="Action Required", |
|
|
label="View Filter", |
|
|
show_label=False, |
|
|
container=False |
|
|
) |
|
|
html_table_output = HTMLPlus( |
|
|
label="Issue List", |
|
|
selectable_elements=["th", "tr"] |
|
|
) |
|
|
with gr.Row(elem_id="pagination-row"): |
|
|
btn_prev = gr.Button("◀", elem_classes="pagination-btn", size="sm") |
|
|
page_display = gr.Markdown("Page 1 of 1", elem_id="page_label") |
|
|
btn_next = gr.Button("▶", elem_classes="pagination-btn", size="sm") |
|
|
|
|
|
|
|
|
with gr.Group(visible=True): |
|
|
with gr.Accordion("📄 Issue Analysis Detail", open=False, visible=False) as issue_analysis_accordion: |
|
|
with gr.Row(): |
|
|
with gr.Column(scale=3): |
|
|
detail_view = gr.HTML(label="Full Report") |
|
|
|
|
|
with gr.Column(scale=1, elem_classes="action-console-col"): |
|
|
with gr.Group() as action_group: |
|
|
gr.Markdown("#### ⚡ Action Console") |
|
|
action_summary = gr.Markdown("Select an issue.") |
|
|
action_comment_input = gr.Textbox(label="Proposed Comment", lines=3, interactive=True) |
|
|
|
|
|
reanalyze_btn = gr.Button("🔄 Re-Analyze Issue") |
|
|
|
|
|
with gr.Column(visible=False) as auth_box: |
|
|
approve_btn = gr.Button("✅ Approve Action", variant="primary") |
|
|
exec_status = gr.Markdown() |
|
|
with gr.Accordion("🧠 Agent Thought Process", open=False, visible=False) as thought_accordion: |
|
|
thought_view = gr.Markdown("Select an issue to see the reasoning.") |
|
|
gr.Markdown("---") |
|
|
|
|
|
gr.Markdown("### 📊 Analytics Overview") |
|
|
with gr.Row(): |
|
|
with gr.Column(): |
|
|
stats_plot = gr.Plot(label="Distribution", show_label=False) |
|
|
with gr.Column(): |
|
|
funnel_plot = gr.Plot(label="Efficiency", show_label=False) |
|
|
with gr.Column(): |
|
|
timeline_plot = gr.Plot(label="Timeline", show_label=False) |
|
|
|
|
|
with gr.TabItem("🥇 Prioritize"): |
|
|
gr.Markdown("### Strategic Backlog Prioritization") |
|
|
with gr.Row(): |
|
|
prio_btn = gr.Button("Generate Strategy Report 🧠", variant="primary") |
|
|
print_btn = gr.Button("🖨️ Print Report", variant="secondary") |
|
|
with gr.Group(): |
|
|
prio_out = gr.HTML(label="AI Strategy Report", elem_id="prio-report") |
|
|
|
|
|
|
|
|
with BottomBar(label="🤖 AI Assistant", bring_to_front=False, height=320, open=True, rounded_borders=True): |
|
|
gr.Markdown("### 📊 Workflow Agent Activity Logs", elem_id="logs-header") |
|
|
with gr.Row(): |
|
|
auto_refresh_timer = gr.Timer(value=30, active=True) |
|
|
with gr.Column(scale=1): |
|
|
refresh_interval = gr.Slider( |
|
|
info="Auto-refresh interval", |
|
|
minimum=30, |
|
|
maximum=300, |
|
|
value=30, |
|
|
step=30, |
|
|
label="Auto-Refresh (seconds)", |
|
|
interactive=True, |
|
|
) |
|
|
with gr.Column(scale=6): |
|
|
activity_log = gr.HTML(label="Trace Log", elem_id="trace-log") |
|
|
|
|
|
|
|
|
trigger_outputs = [ |
|
|
detail_view, |
|
|
action_comment_input, |
|
|
exec_status, |
|
|
auth_box, |
|
|
reanalyze_btn, |
|
|
action_summary, |
|
|
thought_view, |
|
|
issue_analysis_accordion, |
|
|
thought_accordion, |
|
|
chat_trigger_issue |
|
|
] |
|
|
def handle_chat_view_trigger(issue_number, repo_url):
    """Open the detail panel for an issue selected from the chat assistant.

    Returns a tuple aligned with ``trigger_outputs``: the issue details
    followed by ``None`` to reset the ``chat_trigger_issue`` state.
    """
    if not issue_number:
        # No issue selected — leave every output untouched.
        return (gr.skip(),) * len(trigger_outputs)

    # Brief pause so any backend write triggered by the chat has time to land
    # before we read the issue back. TODO(review): confirm 3s is sufficient.
    time.sleep(3)

    return load_issue_details(issue_number, repo_url) + (None,)
|
|
|
|
|
# When the chat assistant sets an issue number, load that issue into the
# detail panel and then clear the trigger (last output is chat_trigger_issue).
chat_trigger_issue.change(
    fn=handle_chat_view_trigger,
    inputs=[chat_trigger_issue, global_repo_url],
    outputs=trigger_outputs
)
|
|
|
|
|
|
|
|
def hard_refresh(filter_val, sort_val, log_lim, repo_url=None):
    """Fully rebuild the dashboard from fresh data.

    Re-fetches issues/logs for the repo, redraws all three plots and the
    issues table, and resets the issue-detail panel to its empty state.

    Args:
        filter_val: Current table filter selection.
        sort_val: Current sort state dict ({"col": ..., "asc": ...}).
        log_lim: Maximum number of agent-log rows to fetch.
        repo_url: Repository URL; falls back to DEFAULT_REPO when empty.

    Returns:
        A 15-tuple aligned with ``common_outputs`` (stats_plot, funnel_plot,
        timeline_plot, html_table_output, activity_log, issues_cache,
        page_display, current_page, then the detail-panel reset values).
    """
    # Use the module-level constant instead of repeating the literal URL.
    repo = repo_url or DEFAULT_REPO
    donut_fig, funnel_fig, timeline_fig, full_df, logs = fetch_fresh_data_to_cache(log_lim, repo)
    html, label, page = render_from_cache(full_df, filter_val, sort_val, 1)

    return (
        donut_fig,
        # BUGFIX: funnel_fig and timeline_fig were previously swapped here,
        # sending the timeline figure to the "Efficiency" funnel plot and
        # the funnel figure to the "Timeline" plot (common_outputs order is
        # stats_plot, funnel_plot, timeline_plot).
        funnel_fig,
        timeline_fig,
        html,
        logs,
        full_df,
        label,
        page,
        # Reset the detail panel: clear views, hide action/thought widgets.
        "",
        "Select an issue.",
        gr.update(visible=False),
        "",
        gr.update(visible=False),
        gr.update(visible=False, open=False),
        ""
    )
|
|
|
|
|
# Shared output list for every handler that calls hard_refresh; the order
# must match hard_refresh's 15-value return tuple exactly.
common_outputs = [
    stats_plot,
    funnel_plot,
    timeline_plot,
    html_table_output,
    activity_log,
    issues_cache,
    page_display,
    current_page,
    # Detail-panel reset targets (positions 9-15 of hard_refresh's return).
    detail_view,
    action_summary,
    auth_box,
    exec_status,
    thought_accordion,
    issue_analysis_accordion,
    thought_view
]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Initial page load: populate plots, table and logs, then convert the
# server-side UTC timestamps to the browser's local time via a JS hook.
app.load(
    fn=hard_refresh,
    inputs=[view_filter, current_sort, log_limit_slider, global_repo_url],
    outputs=common_outputs,
    show_progress_on=[
        html_table_output,
        stats_plot,
        funnel_plot,
        timeline_plot,
    ]
).success(
    fn=None,
    js="() => window.convert_timestamps()"
)

# Manual global-refresh button performs the same full rebuild.
# NOTE(review): this uses .then() while app.load uses .success() — presumably
# intentional (refresh chains even on failure), but worth confirming.
refresh_global.click(
    fn=hard_refresh,
    inputs=[view_filter, current_sort, log_limit_slider, global_repo_url],
    outputs=common_outputs,
    show_progress_on=[
        html_table_output,
        stats_plot,
        funnel_plot,
        timeline_plot,
    ]
).then(
    fn=None,
    js="() => window.convert_timestamps()"
)
|
|
def quick_log_refresh(log_lim):
    """Fetch the newest agent logs and render them as HTML (timer-driven)."""
    return _format_logs_to_html(db.fetch_agent_logs(limit=log_lim))
|
|
|
|
|
# Periodic lightweight refresh: only the activity log is re-fetched
# (no plot/table rebuild), then timestamps are localized client-side.
auto_refresh_timer.tick(
    fn=quick_log_refresh,
    inputs=[log_limit_slider],
    outputs=[activity_log]
).then(
    fn=None,
    js="() => window.convert_timestamps()"
)

# Changing the slider re-creates the Timer with the new interval.
refresh_interval.change(
    fn=lambda x: gr.Timer(value=x),
    inputs=[refresh_interval],
    outputs=[auto_refresh_timer]
)
|
|
|
|
|
def update_dashboard_models(provider):
    """Repopulate the model dropdown for the newly selected provider.

    Selects the provider's first model, or clears the value when the
    provider has no models configured.
    """
    models = AVAILABLE_MODELS_BY_PROVIDER.get(provider, [])
    default_choice = next(iter(models), None)
    return gr.update(choices=models, value=default_choice)
|
|
|
|
|
# Keep the model dropdown in sync with the chosen LLM provider.
global_provider.change(
    fn=update_dashboard_models,
    inputs=[global_provider],
    outputs=[global_model]
)
|
|
|
|
|
def soft_update(df, filter_val, sort_val):
    """Re-render page 1 of the table from the cached DataFrame (no DB hit).

    Returns (html, page_label, page_number), straight from render_from_cache.
    """
    return render_from_cache(df, filter_val, sort_val, 1)
|
|
|
|
|
# Filter changes only re-render from cache — no data re-fetch.
view_filter.change(fn=soft_update, inputs=[issues_cache, view_filter, current_sort], outputs=[html_table_output, page_display, current_page])
|
|
|
|
|
def change_page(direction, current, df, filter_val, sort_val):
    """Step the table view by ``direction`` pages (-1 previous, +1 next).

    Out-of-range targets are presumably clamped inside render_from_cache —
    TODO(review): confirm, since this function does no bounds checking itself.
    """
    return render_from_cache(df, filter_val, sort_val, current + direction)
|
|
|
|
|
# Prev/next buttons share change_page, differing only in the direction.
btn_prev.click(fn=lambda p, df, f, s: change_page(-1, p, df, f, s), inputs=[current_page, issues_cache, view_filter, current_sort], outputs=[html_table_output, page_display, current_page])
btn_next.click(fn=lambda p, df, f, s: change_page(1, p, df, f, s), inputs=[current_page, issues_cache, view_filter, current_sort], outputs=[html_table_output, page_display, current_page])
|
|
|
|
|
|
|
|
def handle_table_interaction(evt: gr.SelectData, df_cache, view_filter, sort_data):
    """Route clicks on the custom HTML issues table.

    Header ("th") clicks re-sort the table; row ("tr") clicks load the
    clicked issue into the detail panel. Returns a 13-tuple aligned with
    the outputs list of html_table_output.select (current_sort,
    html_table_output, detail_view, action_comment_input, auth_box,
    sel_issue_state, sel_repo_state, exec_status, reanalyze_btn,
    thought_accordion, issue_analysis_accordion, action_summary,
    thought_view) — positions here are order-critical.
    """
    data = evt.value
    if evt.index == "th":
        # The custom table embeds the column key under either camelCase or
        # kebab-case depending on how the attribute was serialized.
        clicked_col = data.get('sortCol') or data.get('sort-col')
        if not clicked_col: return gr.skip()
        # Re-clicking the active column toggles direction; a new column
        # starts descending.
        new_asc = not sort_data['asc'] if sort_data['col'] == clicked_col else False
        new_sort = {"col": clicked_col, "asc": new_asc}
        html, _, _ = render_from_cache(df_cache, view_filter, new_sort, page=1)
        return (
            new_sort,
            html,
            # Leave the detail panel and selection state untouched.
            gr.skip(),
            gr.skip(),
            gr.skip(),
            gr.skip(),
            gr.skip(),
            gr.skip(),
            gr.skip(),
            gr.skip(),
            gr.skip(),
            gr.skip(),
            gr.skip(),
        )
    elif evt.index == "tr":
        # Row click: identify the issue + repo encoded in the row's data.
        issue_num = data.get('issueNumber') or data.get('issue-number')
        repo_url = data.get('repoUrl') or data.get('repo-url')
        if not issue_num:
            return gr.skip()
        # NOTE(review): `status` is unpacked but unused here — confirm
        # whether it should feed exec_status instead of the "" below.
        report, action_text, status, auth_vis, reanalyze_vis, summary_text, thought_text, issue_accordion, thought_accordion = load_issue_details(issue_num, repo_url)

        return (
            gr.skip(),        # keep sort state
            gr.skip(),        # keep table HTML
            report,
            action_text,
            auth_vis,
            issue_num,        # remember selection for approve/re-analyze
            repo_url,
            "",               # clear execution status
            reanalyze_vis,
            thought_accordion,
            issue_accordion,
            summary_text,
            thought_text
        )
    # Any other click target: change nothing.
    return gr.skip()
|
|
|
|
|
# Wire table clicks; outputs order must match handle_table_interaction's
# 13-value return tuples exactly.
html_table_output.select(
    fn=handle_table_interaction,
    inputs=[issues_cache, view_filter, current_sort],
    outputs=[
        current_sort,
        html_table_output,
        detail_view,
        action_comment_input,
        auth_box,
        sel_issue_state,
        sel_repo_state,
        exec_status,
        reanalyze_btn,
        thought_accordion,
        issue_analysis_accordion,
        action_summary,
        thought_view
    ],
    show_progress_on=[issue_analysis_accordion]
)
|
|
|
|
|
|
|
|
# Human-in-the-loop approval: execute the agent's proposed action, then
# (only on success) rebuild the whole dashboard to reflect the change.
approve_btn.click(
    fn=execute_approval_workflow,
    inputs=[sel_issue_state, sel_repo_state, global_github_token, action_comment_input],
    outputs=[exec_status]
).success(
    fn=hard_refresh,
    inputs=[view_filter, current_sort, log_limit_slider, global_repo_url],
    outputs=common_outputs,
    show_progress_on=[
        html_table_output,
        stats_plot,
        funnel_plot,
        timeline_plot,
    ]
)
|
|
|
|
|
# Generate the AI prioritization report; progress is hidden because the
# report HTML presumably streams/renders its own status.
prio_btn.click(
    fn=generate_priority_report,
    inputs=[global_repo_url, global_provider, global_model, global_llm_key],
    outputs=[prio_out],
    show_progress="hidden"
)

# Client-side print of the rendered report (no server round-trip).
print_btn.click(fn=None, js="() => window.print_report()")
|
|
# Switching repositories triggers a full dashboard rebuild.
global_repo_url.change(
    fn=hard_refresh,
    inputs=[view_filter, current_sort, log_limit_slider, global_repo_url],
    outputs=common_outputs,
    show_progress_on=[
        html_table_output,
        stats_plot,
        funnel_plot,
        timeline_plot,
    ]
)
|
|
|
|
|
# Kick off a repo sync; concurrency_limit=10 allows several repos to sync
# in parallel. On success, rebuild the dashboard with the synced data.
sync_repo_btn.click(
    fn=trigger_sync_action,
    inputs=[global_repo_url, global_github_token],
    outputs=[sync_status],
    show_progress="hidden",
    concurrency_limit=10
).success(
    fn=hard_refresh,
    inputs=[view_filter, current_sort, log_limit_slider, global_repo_url],
    outputs=common_outputs,
    show_progress_on=[
        html_table_output,
        stats_plot,
        funnel_plot,
        timeline_plot,
    ]
)
|
|
# Re-run the AI analysis for the selected issue with the currently chosen
# provider/model and user-supplied keys; refresh everything on success.
reanalyze_btn.click(
    fn=trigger_manual_reanalysis,
    inputs=[
        sel_issue_state,
        sel_repo_state,
        global_provider,
        global_model,
        global_github_token,
        global_llm_key
    ],
    outputs=[exec_status]
).success(
    fn=hard_refresh,
    inputs=[view_filter, current_sort, log_limit_slider, global_repo_url],
    outputs=common_outputs,
    show_progress_on=[
        html_table_output,
        stats_plot,
        funnel_plot,
        timeline_plot,
    ]
)
|
|
|
|
|
# Launch the Gradio server when the file is executed directly.
if __name__ == "__main__":
    app.launch()
|
|
|