|
|
|
|
|
import gradio as gr |
|
|
import os |
|
|
import sys |
|
|
import time |
|
|
import threading |
|
|
from pathlib import Path |
|
|
|
|
|
|
|
|
# Make the project root importable so the local `main` module (the Flask
# backend) resolves regardless of the current working directory.
project_root = Path(__file__).parent.absolute()
sys.path.insert(0, str(project_root))
|
|
|
|
|
|
|
|
# Runtime configuration for the Hugging Face Spaces Docker container.
# These must be set before importing `main` so the backend sees them.
os.environ['HUGGINGFACE_SPACES'] = '1'          # flag Spaces-specific code paths (presumably read by `main` — TODO confirm)
os.environ['GRADIO_SERVER_NAME'] = '0.0.0.0'    # bind Gradio on all interfaces (required inside Docker)
os.environ['GRADIO_SERVER_PORT'] = '7860'       # the port Spaces exposes publicly
os.environ['PYTHONUNBUFFERED'] = '1'            # unbuffered stdout so container logs appear immediately
os.environ['FLASK_ENV'] = 'production'          # disable Flask debug behaviors
|
|
|
|
|
|
|
|
from main import app as flask_app, get_chat_response, init_db |
|
|
|
|
|
|
|
|
# Module-level session state shared across Gradio callbacks.
current_user = None  # dict with 'id', 'name', 'session_id' once a session starts; None until then
chat_history = []    # NOTE(review): not referenced elsewhere in this file — confirm before removing
|
|
|
|
|
def start_flask_app():
    """Run the Flask backend on 127.0.0.1:5001 (blocking; intended for a daemon thread).

    The database is initialized inside the app context before serving.
    Startup failures are logged rather than propagated so the Gradio
    front end keeps running even if the backend dies.
    """
    print("π Starting Flask app on port 5001...")
    try:
        with flask_app.app_context():
            init_db()
        flask_app.run(
            host='127.0.0.1',       # backend is internal-only; Gradio is the public face
            port=5001,
            debug=False,
            threaded=True,
            use_reloader=False,     # reloader would fork — unsafe inside a thread
        )
    except Exception as exc:
        print(f"β Error starting Flask app: {exc}")
|
|
|
|
|
def create_user_session(name="Guest"):
    """Start a fresh session for *name* and return a greeting string.

    Replaces the module-level ``current_user`` with a new dict whose
    ``id`` and ``session_id`` are derived from the current Unix time.

    Args:
        name: Display name for the session owner (defaults to "Guest").

    Returns:
        A welcome message suitable for showing in the UI.
    """
    global current_user
    # Capture the timestamp once: the original called time.time() twice,
    # so 'id' and 'session_id' could disagree if the clock ticked between calls.
    stamp = int(time.time())
    current_user = {
        'id': f'gradio_user_{stamp}',
        'name': name,
        'session_id': f'session_{stamp}',
    }
    return f"Welcome {name}! Ready to chat."
|
|
|
|
|
def process_chat_message(message, history):
    """Run one chat turn through the Flask backend.

    Args:
        message: The user's input from the textbox (may be None or blank).
        history: Gradio chat history as [user, assistant] pairs; mutated
            in place and also returned.

    Returns:
        Tuple of (updated history, "") — the empty string clears the
        input textbox.
    """
    global current_user
    if not current_user:
        create_user_session("Guest")
    # Guard None as well as empty/whitespace input: Gradio can deliver None
    # (e.g. after a clear), and None.strip() would raise AttributeError.
    if not message or not message.strip():
        return history, ""
    # Append the user turn immediately; the assistant slot is filled below.
    history.append([message, None])
    try:
        with flask_app.app_context():
            response = get_chat_response(message, current_user.get('session_id', 'default'))
        # The backend may return either a dict payload or a plain value.
        if isinstance(response, dict):
            ai_response = response.get('response', 'Sorry, I encountered an error.')
        else:
            ai_response = str(response)
        history[-1][1] = ai_response
    except Exception as e:
        # Best-effort UX: surface a friendly message instead of crashing the UI.
        print(f"β Error processing chat: {e}")
        history[-1][1] = "I'm having trouble processing your message. Please try again."
    return history, ""
|
|
|
|
|
def create_interface():
    """Build and return the Gradio Blocks UI (chat tab + about tab).

    Component creation order defines the on-screen layout; event handlers
    are wired after all components exist.
    """
    with gr.Blocks(title="Mental Health AI Assistant") as interface:
        gr.Markdown("# π§ Mental Health AI Assistant\n### Powered by Flask + FastAPI + Gradio on Hugging Face Spaces")

        with gr.Tabs():
            with gr.Tab("π¬ Chat"):
                # Main conversation pane; history is [user, assistant] pairs.
                chatbot = gr.Chatbot(label="Mental Health Assistant", height=500, show_copy_button=True)
                with gr.Row():
                    # Input takes 4/5 of the row width, the send button 1/5.
                    msg_input = gr.Textbox(placeholder="Type your message here...", show_label=False, scale=4)
                    send_btn = gr.Button("Send", variant="primary", scale=1)
                clear_btn = gr.Button("Clear Chat")
                session_info = gr.Textbox(value="Click 'Start Session' to begin", label="Session")
                new_session_btn = gr.Button("Start New Session")

            with gr.Tab("βΉοΈ About"):
                gr.Markdown("""
### About This Application
This Mental Health AI Assistant is deployed on Hugging Face Spaces using Docker.

**Architecture:**
- Frontend: Gradio interface (port 7860)
- Flask Backend: Main application logic (port 5001)
- FastAPI Backend: AI services (port 8001)
- Database: SQLite for data persistence
""")

        # Both the button click and pressing Enter in the textbox send a message;
        # process_chat_message returns (history, "") so the input box is cleared.
        send_btn.click(process_chat_message, [msg_input, chatbot], [chatbot, msg_input])
        msg_input.submit(process_chat_message, [msg_input, chatbot], [chatbot, msg_input])
        # Clearing replaces the chat history with an empty list.
        clear_btn.click(lambda: [], outputs=[chatbot])
        # No inputs wired, so create_user_session runs with its default name ("Guest").
        new_session_btn.click(create_user_session, outputs=[session_info])

    return interface
|
|
|
|
|
def main():
    """Start the Flask backend in a daemon thread, then serve the Gradio UI.

    Blocks on ``interface.launch`` until the process is terminated.
    """
    print("π Starting Mental Health AI Assistant for Hugging Face Spaces Docker...")

    # Daemon thread: the Flask server dies with the main process.
    flask_thread = threading.Thread(target=start_flask_app, daemon=True)
    flask_thread.start()

    # Crude readiness wait — no health-check endpoint is polled here.
    print("β³ Waiting for Flask to initialize...")
    time.sleep(3)

    print("π¨ Creating Gradio interface...")
    interface = create_interface()

    print("π Launching on Hugging Face Spaces (Docker)...")
    # The `enable_queue` launch kwarg was deprecated in Gradio 3.x and removed
    # in 4.x (it raises TypeError there). Calling .queue() beforehand enables
    # the request queue on both major versions with identical behavior.
    interface.queue()
    interface.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        debug=False,
        show_error=True,
    )
|
|
|
|
|
# Script entry point: only start the server stack when executed directly,
# not when imported (e.g. by a test or a WSGI loader).
if __name__ == "__main__":
    main()
|
|
|