jjia11 committed
Commit be9ab96 · verified · 1 Parent(s): 78b9e15

Upload folder using huggingface_hub

Files changed (5)
  1. .gitignore +5 -0
  2. README.md +2 -8
  3. __init__.py +0 -0
  4. main.py +127 -0
  5. requirements.txt +17 -0
.gitignore ADDED
@@ -0,0 +1,5 @@
+ .env
+ simulated_data/
+ __pycache__/
+ *.pyc
+ flagged/
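
The `.env` entry is ignored because `main.py` below reads the `HF_TOKEN` secret from it through `python-dotenv`. A minimal sketch of that lookup, assuming a local `.env` that defines a placeholder `HF_TOKEN`:

import os
from dotenv import load_dotenv

load_dotenv()  # copies key=value pairs from .env into the process environment
hf_token = os.environ.get("HF_TOKEN")  # None when the variable is absent
if hf_token is None:
    print("HF_TOKEN is not set; a private upstream Space cannot be loaded")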
README.md CHANGED
@@ -1,12 +1,6 @@
  ---
- title: MentorMatcher Demo Public
- emoji: 🏆
- colorFrom: blue
- colorTo: pink
+ title: MentorMatcher-Demo-Public
+ app_file: app.py
  sdk: gradio
  sdk_version: 4.44.0
- app_file: app.py
- pinned: false
  ---
-
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
__init__.py ADDED
File without changes
main.py ADDED
@@ -0,0 +1,127 @@
+ import os
+ import gradio as gr
+ from dotenv import load_dotenv
+ import asyncio
+ import traceback
+ # ... (keep other imports)
+
+ # Load .env file
+ load_dotenv()
+
+ # ... (keep other imports and function definitions)
+
+ # Add this new function to load and use the Hugging Face space
+ def load_hf_space():
+     HF_TOKEN = os.environ.get("HF_TOKEN")
+     try:
+         interface = gr.Interface.load(
+             name="jjia11/MentorMatch-Demo",
+             src="huggingface",
+             hf_token=HF_TOKEN,
+         )
+         print("Hugging Face space loaded successfully")
+         return interface
+     except Exception as e:
+         print(f"Error loading Hugging Face space: {str(e)}")
+         return None
+
+ # Load the Hugging Face space
+ hf_space = load_hf_space()
+
+ # Modify the process_cv_wrapper function to use the Hugging Face space
+ def process_cv_wrapper(file, num_candidates):
+     async def async_wrapper():
+         try:
+             if hf_space is None:
+                 raise ValueError("Hugging Face space is not loaded")
+
+             # Use the Hugging Face space to process the CV
+             result = hf_space.predict(
+                 file.name,  # Assuming the space expects a file path
+                 num_candidates,
+                 api_name="/process_cv"  # Replace with your actual API endpoint
+             )
+
+             # Assuming the result contains the expected outputs
+             mentee_summary, mentor_table_html, evaluated_matches, csv_data = result
+
+             return mentee_summary, mentor_table_html, evaluated_matches, csv_data
+         except Exception as e:
+             print(f"An error occurred: {str(e)}")
+             print("Traceback:")
+             print(traceback.format_exc())
+             return "Error occurred", "Error occurred", [], []
+
+     return asyncio.run(async_wrapper())
+
+ # Modify the chat_query function to use the Hugging Face space
+ async def chat_query(message, history, index_choice):
+     try:
+         if hf_space is None:
+             raise ValueError("Hugging Face space is not loaded")
+
+         # Use the Hugging Face space for chat functionality
+         response = hf_space.predict(
+             message,
+             history,
+             index_choice,
+             api_name="/chat"  # Replace with your actual chat API endpoint
+         )
+
+         # Assuming the response is already in the correct format
+         return response, ""
+     except Exception as e:
+         print(f"An error occurred in chat: {str(e)}")
+         return history + [[message, "An error occurred. Please try again."]], ""
+
+ # Gradio interface
+ with gr.Blocks() as demo:
+     gr.HTML("<h1>TCH Mentor-Mentee Matching System</h1>")
+
+     with gr.Tab("Mentor Search"):
+         with gr.Row():
+             with gr.Column(scale=1):
+                 file = gr.File(label="Upload Mentee CV (PDF)")
+
+             with gr.Column(scale=1):
+                 num_candidates = gr.Number(label="Number of Candidates", value=5, minimum=1, maximum=100, step=1)
+                 submit_btn = gr.Button("Submit")
+
+         summary = gr.Textbox(label="Student CV Summary")
+         mentor_table = gr.HTML(label="Matching Mentors Table", value="<div style='height: 500px;'>Results will appear here after submission.</div>")
+         download_btn = gr.Button("Download Results as CSV")
+
+         evaluated_matches = gr.State([])
+         csv_data = gr.State([])
+
+         submit_btn.click(
+             fn=process_cv_wrapper,
+             inputs=[file, num_candidates],
+             outputs=[summary, mentor_table, evaluated_matches, csv_data],
+             show_progress=True
+         )
+
+         download_btn.click(
+             fn=download_csv,
+             inputs=[csv_data],
+             outputs=gr.File(label="Download CSV", height=30),
+             show_progress=False,
+         )
+
+     with gr.Tab("Chat"):
+         chatbot = gr.Chatbot()
+         msg = gr.Textbox(label="Type your message here...")
+         clear = gr.Button("Clear Chat")
+
+         chat_index_choice = gr.Dropdown(
+             choices=["Assistant Professors and Above", "Above Assistant Professors"],
+             label="Select Index for Chat",
+             value="Assistant Professors and Above"
+         )
+
+         msg.submit(chat_query, inputs=[msg, chatbot, chat_index_choice], outputs=[chatbot, msg])
+         clear.click(lambda: ([], ""), outputs=[chatbot, msg])
+
+ if __name__ == "__main__":
+     demo.queue()
+     demo.launch(share=True)
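
Since `gradio_client` is pinned in requirements.txt, the same upstream calls can also be made without `gr.Interface.load` by talking to the Space directly through `gradio_client.Client`. A hedged sketch, assuming the `jjia11/MentorMatch-Demo` Space really exposes a `/process_cv` endpoint with the inputs and four-part output that `main.py` expects (the endpoint name and return shape are carried over from the code above, not verified):

import os
from gradio_client import Client, handle_file

# Connect to the upstream Space; the token is only required if the Space is private.
client = Client("jjia11/MentorMatch-Demo", hf_token=os.environ.get("HF_TOKEN"))

# Mirror process_cv_wrapper: api_name and the unpacked result are assumptions from main.py.
result = client.predict(
    handle_file("mentee_cv.pdf"),  # placeholder path to an uploaded mentee CV
    5,                             # number of candidates
    api_name="/process_cv",
)
mentee_summary, mentor_table_html, evaluated_matches, csv_data = result

The assumed `/chat` endpoint would be called the same way, passing the message, history, and index choice as positional arguments.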
requirements.txt ADDED
@@ -0,0 +1,17 @@
+ PyPDF2==3.0.1
+ tqdm==4.66.4
+ python-dotenv==1.0.1
+ gradio==4.36.1
+ gradio_client==1.0.1
+ pandas==2.2.2
+ gpt4all==2.7.0
+ langchain-openai>=0.1.21
+ faiss-cpu>=1.8.0
+ tenacity==8.4.1
+ pypdf==4.2.0
+ openai
+ langchain-community
+ langchain-core
+ asyncio
+ aiohttp
+ python-docx