chatbot-sverige / app.py
organicoder's picture
Upload 5 files
a2e4dd9 verified
raw
history blame
3.91 kB
import gradio as gr
import openai
import os
from typing import List, Tuple
from config import Config
from pdf_processor import PDFProcessor
# Fail fast if required configuration (e.g. the API key) is missing, before
# any client is constructed.
try:
    Config.validate()
except ValueError as e:
    print(f"Configuration Error: {e}")
    # Raise SystemExit rather than calling the site-provided exit() builtin,
    # which is not guaranteed to exist (e.g. when running with `python -S`).
    # Exit status 1 is preserved.
    raise SystemExit(1)
# OpenAI client used for all chat completions in this app.
client = openai.OpenAI(api_key=Config.OPENAI_API_KEY)

# Handles PDF ingestion, embedding, and vector-store similarity search.
pdf_processor = PDFProcessor()
# Reuse a previously persisted vector store when one exists; otherwise build
# it from the PDF now and persist it for faster subsequent startups. A failed
# build is non-fatal: the bot simply answers without PDF knowledge.
if not pdf_processor.load_vector_store():
    print("πŸ”„ Processing PDF for the first time...")
    if not pdf_processor.process_pdf():
        print("⚠️ PDF processing failed. Chatbot will work without PDF knowledge.")
    else:
        pdf_processor.save_vector_store()
def chat_with_bot(message: str, history: List[Tuple[str, str]]) -> Tuple[str, List[Tuple[str, str]]]:
    """Run one chat turn, grounding the reply in retrieved PDF content when available.

    Args:
        message: The user's latest input from the textbox.
        history: Gradio chat history as (user, assistant) pairs; mutated in
            place with the new turn.

    Returns:
        A ``("", history)`` pair: the empty string clears the input textbox
        and the updated history refreshes the Chatbot component.
    """
    # Ignore empty / whitespace-only submissions without touching history.
    if not message.strip():
        return "", history

    # Best-effort retrieval: look up context relevant to this message in the
    # vector store. Retrieval failures are logged but never block the chat.
    pdf_context = ""
    try:
        if pdf_processor.vector_store:
            relevant_chunks = pdf_processor.search_similar_content(message, k=2)
            if relevant_chunks:
                pdf_context = (
                    "\n\nRelevant information from the Health Tech Hub Copenhagen document:\n"
                    + "\n".join(relevant_chunks)
                )
    except Exception as e:
        print(f"Warning: Could not search PDF content: {e}")

    # Rebuild the OpenAI message list: system prompt first, then prior turns.
    messages = [
        {
            "role": "system",
            "content": Config.SYSTEM_PROMPT
            + "\n\nYou have access to information about Health Tech Hub Copenhagen. Use this information when relevant to answer questions.",
        }
    ]
    for human, assistant in history:
        messages.append({"role": "user", "content": human})
        messages.append({"role": "assistant", "content": assistant})

    # Attach retrieved context to the outgoing prompt only; the visible
    # history keeps the user's original message.
    full_message = f"{message}\n\n{pdf_context}" if pdf_context else message
    messages.append({"role": "user", "content": full_message})

    try:
        response = client.chat.completions.create(
            model=Config.OPENAI_MODEL,
            messages=messages,
            max_tokens=Config.MAX_TOKENS,
            temperature=Config.TEMPERATURE,
        )
        # `content` can be None (e.g. if the model returns only tool calls);
        # store an empty string rather than None in the chat history.
        assistant_response = response.choices[0].message.content or ""
        history.append((message, assistant_response))
        return "", history
    except Exception as e:
        # Surface API failures in the chat itself so the user sees them
        # instead of the app crashing mid-conversation.
        error_message = f"Sorry, I encountered an error: {str(e)}"
        history.append((message, error_message))
        return "", history
def clear_chat():
    """Reset the conversation: an empty list wipes the Chatbot component."""
    return []
# ---- Gradio UI -------------------------------------------------------------
with gr.Blocks(
    title=Config.GRADIO_TITLE,
    theme=gr.themes.Soft(),
    css="""
.gradio-container {
    max-width: 800px;
    margin: auto;
}
""",
) as demo:
    # Static page header.
    gr.Markdown(
        """
# πŸ€– AI Chatbot
Welcome! I'm your AI assistant. Feel free to ask me anything!
---
"""
    )

    # Conversation display.
    chatbot = gr.Chatbot(
        height=Config.GRADIO_HEIGHT,
        show_label=False,
        container=True,
        bubble_full_width=False,
    )

    # Free-text input; pressing Enter submits the message.
    msg = gr.Textbox(
        placeholder="Type your message here...",
        show_label=False,
        container=False,
    )

    clear = gr.Button("Clear Chat", variant="secondary")

    # Submitting the textbox runs one chat turn: the outputs clear the
    # textbox and refresh the conversation display.
    msg.submit(chat_with_bot, inputs=[msg, chatbot], outputs=[msg, chatbot])
    # The clear button empties the conversation display.
    clear.click(clear_chat, outputs=chatbot)

# Launch the app when run as a script.
if __name__ == "__main__":
    demo.launch()