import os
import gradio as gr
from dotenv import load_dotenv
import asyncio
import traceback
from gradio_client import Client  # used to call the Space's API programmatically

# ... (keep other imports)

# Load environment variables from the .env file
load_dotenv()
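
# A minimal .env for local development might look like this (the token value
# is a placeholder, not a real credential):
#
#   HF_TOKEN=hf_xxxxxxxxxxxxxxxxxxxx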

# ... (keep other function definitions)

# Add this new function to connect to the Hugging Face Space.
# Note: gr.Interface.load is deprecated in current Gradio releases;
# gradio_client.Client is the supported way to call a Space's endpoints
# and matches the .predict(..., api_name=...) calls used below.
def load_hf_space():
    HF_TOKEN = os.environ.get("HF_TOKEN")
    try:
        client = Client("jjia11/MentorMatch-Demo", hf_token=HF_TOKEN)
        print("Hugging Face space loaded successfully")
        return client
    except Exception as e:
        print(f"Error loading Hugging Face space: {e}")
        return None

# Load the Hugging Face space once at startup
hf_space = load_hf_space()
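
# Optional sanity check (a sketch): list the Space's exposed endpoints so the
# api_name values used below ("/process_cv", "/chat") can be verified.
if hf_space is not None:
    hf_space.view_api()  # prints each endpoint's name and signature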

# Modify process_cv_wrapper to delegate CV processing to the Space.
# Client.predict is synchronous and Gradio runs sync handlers in worker
# threads, so the original asyncio.run wrapper is unnecessary here.
def process_cv_wrapper(file, num_candidates):
    try:
        if hf_space is None:
            raise ValueError("Hugging Face space is not loaded")
        result = hf_space.predict(
            file.name,  # assuming the Space expects a file path
            num_candidates,
            api_name="/process_cv",  # replace with your actual API endpoint
        )
        # Assuming the endpoint returns all four outputs in this order
        mentee_summary, mentor_table_html, evaluated_matches, csv_data = result
        return mentee_summary, mentor_table_html, evaluated_matches, csv_data
    except Exception as e:
        print(f"An error occurred: {e}")
        print("Traceback:")
        print(traceback.format_exc())
        return "Error occurred", "Error occurred", [], []
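
# For long-running CV processing, gradio_client also offers a non-blocking
# call: submit() returns a Job whose result can be collected later.
# A sketch (cv_path is a placeholder):
#
#   job = hf_space.submit(cv_path, 5, api_name="/process_cv")
#   ...                      # do other work while the Space runs
#   outputs = job.result()   # blocks until the job finishes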

# Modify chat_query to delegate chat to the Space. Gradio supports async
# event handlers; Client.predict blocks, so run it off the event loop.
async def chat_query(message, history, index_choice):
    try:
        if hf_space is None:
            raise ValueError("Hugging Face space is not loaded")
        response = await asyncio.to_thread(
            hf_space.predict,
            message,
            history,
            index_choice,
            api_name="/chat",  # replace with your actual chat API endpoint
        )
        # Assuming the endpoint returns the updated chat history
        return response, ""
    except Exception as e:
        print(f"An error occurred in chat: {e}")
        return history + [[message, "An error occurred. Please try again."]], ""
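
# The chat endpoint can also be exercised without the UI, which helps
# separate Space-side errors from event wiring; a sketch:
#
#   reply = hf_space.predict("hello", [], "Assistant Professors and Above",
#                            api_name="/chat")
#   print(reply)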

# Gradio interface
with gr.Blocks() as demo:
    gr.HTML("<h1>TCH Mentor-Mentee Matching System</h1>")
    with gr.Tab("Mentor Search"):
        with gr.Row():
            with gr.Column(scale=1):
                file = gr.File(label="Upload Mentee CV (PDF)")
            with gr.Column(scale=1):
                num_candidates = gr.Number(label="Number of Candidates", value=5, minimum=1, maximum=100, step=1)
        submit_btn = gr.Button("Submit")
        summary = gr.Textbox(label="Student CV Summary")
        mentor_table = gr.HTML(label="Matching Mentors Table", value="<div style='height: 500px;'>Results will appear here after submission.</div>")
        download_btn = gr.Button("Download Results as CSV")
        # Define the download target in the layout so the click handler can
        # reference it, rather than creating it inline in `outputs`
        csv_file = gr.File(label="Download CSV", height=30)
        evaluated_matches = gr.State([])
        csv_data = gr.State([])
        submit_btn.click(
            fn=process_cv_wrapper,
            inputs=[file, num_candidates],
            outputs=[summary, mentor_table, evaluated_matches, csv_data],
            show_progress=True,
        )
        download_btn.click(
            fn=download_csv,  # kept from the original file
            inputs=[csv_data],
            outputs=csv_file,
            show_progress=False,
        )
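        # download_csv is assumed to write csv_data to a temporary .csv file
        # and return its path; a minimal sketch of that shape (the actual
        # implementation kept in the original file may differ):
        #
        #   import csv, tempfile
        #   def download_csv(csv_data):
        #       f = tempfile.NamedTemporaryFile(mode="w", suffix=".csv",
        #                                       newline="", delete=False)
        #       with f:
        #           csv.writer(f).writerows(csv_data)
        #       return f.name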
    with gr.Tab("Chat"):
        chatbot = gr.Chatbot()
        msg = gr.Textbox(label="Type your message here...")
        clear = gr.Button("Clear Chat")
        chat_index_choice = gr.Dropdown(
            choices=["Assistant Professors and Above", "Above Assistant Professors"],
            label="Select Index for Chat",
            value="Assistant Professors and Above",
        )
        msg.submit(chat_query, inputs=[msg, chatbot, chat_index_choice], outputs=[chatbot, msg])
        clear.click(lambda: ([], ""), outputs=[chatbot, msg])

if __name__ == "__main__":
    demo.queue()
    demo.launch(share=True)
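
# If Space calls are slow or multiple users are expected, the queue can be
# tuned; a sketch:
#
#   demo.queue(max_size=16)
#   demo.launch(share=True)  # share=True creates a temporary public URL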