import os
import random
import sqlite3
from datetime import datetime

import gradio as gr
from transformers import pipeline

# -------------------------------------------------
# 🎯 MODEL INITIALIZATION
# -------------------------------------------------
MODEL_NAME = "superb/hubert-large-superb-er"
emotion_classifier = pipeline("audio-classification", model=MODEL_NAME)

# (Optional) Simulated DB path
os.makedirs("data", exist_ok=True)
DB_PATH = "data/scm_emotion.db"

# -------------------------------------------------
# 😄 EMOTION MAP + COLORS
# -------------------------------------------------
# Note: the SUPERB ER checkpoint normally emits only "neu", "hap", "ang" and
# "sad"; the remaining entries act as a forward-compatible fallback.
EMOTION_MAP = {
    "ang": ("Angry", "😡", "#ff4d4d"),
    "hap": ("Happy", "😄", "#4caf50"),
    "neu": ("Neutral", "😐", "#9e9e9e"),
    "sad": ("Sad", "😢", "#2196f3"),
    "exc": ("Excited", "🤩", "#ff9800"),
    "fru": ("Frustrated", "😤", "#f44336"),
    "fea": ("Fearful", "😨", "#673ab7"),
    "sur": ("Surprised", "😲", "#00bcd4"),
    "dis": ("Disgusted", "🤢", "#8bc34a"),
}

# -------------------------------------------------
# 🧠 EMOTION ANALYSIS
# -------------------------------------------------
def analyze_emotion(audio, team, purpose):
    if audio is None:
        return "<p>⚠️ Please record or upload audio.</p>", None

    # Map the generic "Other" choice to a clearer placeholder
    if team == "Other":
        team = "Custom/Unspecified"
    if purpose == "Other":
        purpose = "Custom/Unspecified"

    # Classify the clip and sort labels by confidence (highest first)
    results = emotion_classifier(audio)
    results = sorted(results, key=lambda x: x['score'], reverse=True)
    top = results[0]
    label, emoji, color = EMOTION_MAP.get(top['label'], (top['label'], "🎭", "#607d8b"))
    score = round(top['score'] * 100, 2)

    # Dashboard HTML
    breakdown_items = []
    for r in results:
        r_label, r_emoji, _ = EMOTION_MAP.get(r['label'], (r['label'], "🎭", "#607d8b"))
        breakdown_items.append(f"<li>{r_emoji} {r_label}: {round(r['score'] * 100, 2)}%</li>")
    dashboard_html = f"""
    <div>
        <p>🏢 {team} | 🎯 {purpose}</p>
        <h2 style="color:{color};">{emoji} {label.upper()} — {score}%</h2>
        <p>Detected Emotion Intensity (Confidence)</p>
        <h3>📊 Full Emotion Breakdown</h3>
        <ul>{''.join(breakdown_items)}</ul>
    </div>
    """

    # Insight (randomly chosen for the demo; no real analytics behind it)
    insights = random.choice([
        "🧩 Team seems calm and balanced today. Great stability!",
        "⚡ Slight emotional tension detected. Consider quick sync-up meetings.",
        "💬 High positive tone — keep up the good energy!",
        "🚨 Stress indicators detected. HR may follow up proactively.",
        "📈 Emotion variation is rising — review workloads or deadlines.",
    ])
    insight_html = f"""
    <div>
        <h3>🧠 AI Insight</h3>
        <p>{insights}</p>
        <p><em>(Demo mode — not stored to any database)</em></p>
    </div>
    """
    return dashboard_html, insight_html

# -------------------------------------------------
# 🎨 GRADIO INTERFACE
# -------------------------------------------------
DEPARTMENTS = [
    "Procurement", "Logistics", "Planning", "Inventory",
    "Distribution", "HR", "Other",
]
PURPOSES = [
    "HR Meeting", "Team Stand-up", "One-on-One",
    "Customer Call", "Interview", "Other",
]

disclaimer_html = """
<div>
    <h4>💡 Demo Disclaimer</h4>
    <p>This is a demonstration version created for HR and leadership showcases.</p>
    <p>Your voice data is <b>NOT</b> being saved or shared anywhere.</p>
    <p>Database logging is simulated — you can record or upload audio freely.</p>
    <p>✅ Safe to explore and test in this environment.</p>
</div>
"""

with gr.Blocks(theme=gr.themes.Soft()) as app:
    gr.HTML("""
    <div>
        <h1>🎧 SCM Emotion Intelligence Dashboard</h1>
        <p>Analyze live or uploaded audio from meetings to understand emotional tone within teams.</p>
        <p>Built for HR & Managers to assess engagement and team well-being.</p>
    </div>
    """)

    with gr.Row():
        # Left Panel: Inputs
        with gr.Column(scale=1, min_width=350):
            audio_input = gr.Audio(
                sources=["microphone", "upload"],
                type="filepath",
                label="🎙️ Record or Upload Audio",
            )
            team_input = gr.Dropdown(DEPARTMENTS, label="🏢 Select Team / Department")
            purpose_input = gr.Dropdown(PURPOSES, label="🎯 Purpose of Audio")
            analyze_btn = gr.Button("🚀 Analyze Emotion", variant="primary")
            gr.HTML(disclaimer_html)

        # Right Panel: Sticky Output
        with gr.Column(scale=2, min_width=500):
            output_html = gr.HTML()
            insight_html = gr.HTML()

    analyze_btn.click(
        fn=analyze_emotion,
        inputs=[audio_input, team_input, purpose_input],
        outputs=[output_html, insight_html],
    )
    gr.HTML("""
    <p>💾 (Demo Mode) Database integration coming soon for Power BI visualization.</p>
    """)

if __name__ == "__main__":
    app.launch(server_name="0.0.0.0", server_port=7860)