from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from transformers import AutoTokenizer
import onnxruntime as ort
import numpy as np
from pathlib import Path
import traceback
# import pandas as pd  # Removed because it is unused

app = FastAPI()

# === CORS for the frontend on Vercel ===
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# === Path setup ===
BASE_DIR = Path(__file__).resolve().parent
MODEL_PATH = BASE_DIR / "models" / "bert_chatbot.onnx"
TOKENIZER_PATH = BASE_DIR / "models" / "bert-base-multilingual-cased"
# DATASET_PATH is no longer needed

# === Global variables ===
tokenizer = None
session = None

# === Load tokenizer and model ===
try:
    print("🚀 Loading ONNX model...")
    tokenizer = AutoTokenizer.from_pretrained(str(TOKENIZER_PATH))
    session = ort.InferenceSession(str(MODEL_PATH), providers=["CPUExecutionProvider"])
    print("✅ ONNX model loaded!")
except Exception as e:
    # This log appears when there is a path/file problem at startup
    print("--------------------------------------------------")
    print(f"❌ FATAL ERROR WHILE LOADING ONNX/RESOURCES: {e}")
    traceback.print_exc()
    print("--------------------------------------------------")

# === Default responses ===
responses = {
    # ⭐ THIS FIX ENSURES THE GREETING RESPONSE IS CORRECT ⭐
    "about_me": "I am a passionate developer specializing in AI and web development.",
    "skills": "My main skills are HTML5, CSS3, JavaScript, Laravel, Node.js, Database, TensorFlow, PyTorch, Firebase, and Jupyter Notebook.",
    "projects": "Some of my projects are Mobile Apps Bald Detection and Jupyter Notebook Bald Detection.",
    "experience": "I have worked as IT Support, AI Engineer, and Freelancer on multiple projects.",
    "career_goal": "My career goal is to become a Full Stack Developer and Machine Learning Engineer.",
    "greeting": "Hello! How can I help you regarding this portfolio?",  # the correct greeting response
    "fallback": "I'm sorry, I don't understand. Please ask another question.",
}

# === Request schema ===
class ChatRequest(BaseModel):
    text: str


@app.get("/")
async def root():
    return {"message": "🚀 ONNX Chatbot API running on Hugging Face"}


@app.post("/chatbot")
async def chatbot(req: ChatRequest):
    # Declare intent as fallback up front to prevent a NameError in the except block
    intent = "fallback"

    if session is None:
        return {"reply": responses["fallback"], "intent": "error_loading"}

    try:
        # 1. Tokenize (return_tensors="np")
        inputs = tokenizer(req.text, return_tensors="np", padding=True, truncation=True, max_length=128)

        # 2. Cast the inputs to the int64 dtype the ONNX model expects
        input_names = [i.name for i in session.get_inputs()]
        ort_inputs = {}
        for name in input_names:
            if name in inputs:
                ort_inputs[name] = inputs[name].astype(np.int64)

        # 3. Run ONNX inference
        ort_outputs = session.run(None, ort_inputs)

        # 4. Take the logits and pick the predicted class
        logits = ort_outputs[0]
        pred_id = int(np.argmax(logits, axis=1)[0])

        # === Map the predicted ID to an intent label ===
        id2label = {
            0: "greeting",
            1: "about_me",
            2: "skills",
            3: "projects",
            4: "experience",
            5: "career_goal",
            6: "fallback",
        }
        intent = id2label.get(pred_id, "fallback")

        # === Look up the reply in the global responses dictionary ===
        reply = responses.get(intent, responses["fallback"])

        return {"reply": str(reply), "intent": intent}

    except Exception as e:
        print(f"❌ Runtime error in /chatbot: {e}")
        traceback.print_exc()
        return {"reply": "⚠️ Internal server error.", "intent": intent}
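

# --- Usage sketch (not part of the original app) ---
# A minimal way to serve and query the API above for local testing. The use of
# uvicorn/requests and the port 7860 are assumptions here, not confirmed details
# of the actual deployment; Hugging Face Spaces normally supplies its own start
# command, so this block only matters when the file is run directly.
if __name__ == "__main__":
    import uvicorn

    # Serve the FastAPI app defined above on all interfaces.
    uvicorn.run(app, host="0.0.0.0", port=7860)

# Hypothetical client request against a running instance:
#
#   import requests
#   r = requests.post("http://127.0.0.1:7860/chatbot", json={"text": "What are your skills?"})
#   print(r.json())  # e.g. {"reply": "My main skills are ...", "intent": "skills"}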