Famazo committed
Commit e1f3d9e · 1 Parent(s): cf7d8ac

Update backend/api.py

Files changed (1)
  1. backend/api.py +46 -57
backend/api.py CHANGED
@@ -3,57 +3,63 @@ from fastapi.middleware.cors import CORSMiddleware
 from pydantic import BaseModel
 from transformers import AutoTokenizer
 import onnxruntime as ort
-import numpy as np
+import numpy as np
 from pathlib import Path
 import traceback
 
+# === Initialize FastAPI ===
 app = FastAPI()
 
-# === CORS for the frontend on Vercel ===
+# === CORS for the frontend (e.g. from Vercel) ===
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=["*"],
+    allow_origins=["*"],  # can be restricted for better security
     allow_credentials=True,
     allow_methods=["*"],
     allow_headers=["*"],
 )
 
-# === Path setup ===
+# === Paths ===
 BASE_DIR = Path(__file__).resolve().parent
-
-# MAKE SURE THIS PATH MATCHES YOUR REPOSITORY
 MODEL_PATH = BASE_DIR / "models" / "bert_chatbot.onnx"
-TOKENIZER_PATH = BASE_DIR / "models" / "bert-base-multilingual-cased"
+TOKENIZER_PATH = BASE_DIR / "models" / "bert_chatbot_tokenizer"
 
-# === Global Variables ===
+# === Global variables ===
 tokenizer = None
 session = None
 
-# === Load tokenizer and model ===
+# === Load model and tokenizer ===
 try:
-    print("🚀 Loading ONNX model...")
-
+    print("🚀 Loading tokenizer and ONNX model...")
     tokenizer = AutoTokenizer.from_pretrained(str(TOKENIZER_PATH))
     session = ort.InferenceSession(str(MODEL_PATH), providers=["CPUExecutionProvider"])
-
-    print("✅ ONNX model loaded!")
-
+    print("✅ Model and tokenizer loaded successfully!")
 except Exception as e:
-    print("--------------------------------------------------")
-    print(f"❌ FATAL ERROR WHILE LOADING ONNX/RESOURCES: {e}")
+    print("❌ ERROR while loading model/tokenizer:", e)
     traceback.print_exc()
-    print("--------------------------------------------------")
-    pass
 
-# === Default responses ===
+# === Label mapping (MUST MATCH TRAINING) ===
+id2label = {
+    0: "about_me",
+    1: "career_goal",
+    2: "experience",
+    3: "fallback",
+    4: "greeting",
+    5: "projects",
+    6: "skills",
+}
+
+label2id = {v: k for k, v in id2label.items()}
+
+# === Response dictionary, SAME as in training ===
 responses = {
     "about_me": "I am a passionate developer specializing in AI and web development.",
-    "skills": "My main skills are HTML5, CSS3, JavaScript, Laravel, Node.js, TensorFlow, and PyTorch.",
-    "projects": "Some of my projects include Mobile Apps Bald Detection and Portfolio Website.",
-    "experience": "I have worked as IT Support, AI Engineer, and Freelancer.",
-    "career_goal": "My career goal is to become a Full Stack Developer and ML Engineer.",
+    "skills": "My main skills are HTML5, CSS3, JavaScript, Laravel, Node.js, Database, TensorFlow, PyTorch, Firebase, and Jupyter Notebook.",
+    "projects": "Some of my projects are Mobile Apps Bald Detection and Jupyter Notebook Bald Detection.",
+    "experience": "I have worked as IT Support, AI Engineer, and Freelancer on multiple projects.",
+    "career_goal": "My career goal is to become a Full Stack Developer and Machine Learning Engineer.",
     "greeting": "Hello! How can I help you regarding this portfolio?",
-    "fallback": "I'm sorry, I don't understand. Please try another question."
+    "fallback": "I'm sorry, I don't understand. Please ask another question."
 }
 
 # === Request schema ===
@@ -62,52 +68,35 @@ class ChatRequest(BaseModel):
 
 @app.get("/")
 async def root():
-    return {"message": "🚀 ONNX Chatbot API running on Hugging Face"}
+    return {"message": "🚀 Chatbot API (ONNX) running on Hugging Face"}
 
 @app.post("/chatbot")
 async def chatbot(req: ChatRequest):
-    # ⭐ FIX: declare 'intent' outside the try block so it can be accessed in 'except'
-    intent = "fallback"
-
+    intent = "fallback"
+
     if session is None:
         return {"reply": responses["fallback"], "intent": "error_loading"}
-
+
     try:
-        # 1. Tokenization (return_tensors="np")
+        # === Tokenize the input ===
        inputs = tokenizer(req.text, return_tensors="np", padding=True, truncation=True, max_length=128)
-
-        # 2. Enforce data types for ONNX (REQUIRED)
-        input_names = [i.name for i in session.get_inputs()]
-        ort_inputs = {}
 
-        for name in input_names:
-            if name in inputs:
-                ort_inputs[name] = inputs[name].astype(np.int64)
+        # === Prepare inputs for ONNX ===
+        ort_inputs = {k: v.astype(np.int64) for k, v in inputs.items()}
 
-        # 3. ONNX inference
+        # === Inference ===
         ort_outputs = session.run(None, ort_inputs)
-
-        # 4. Get logits and prediction
         logits = ort_outputs[0]
         pred_id = np.argmax(logits, axis=1)[0]
-
-        # === Map IDs to labels ===
-        id2label = {
-            0: "greeting", 1: "about_me", 2: "skills", 3: "projects",
-            4: "experience", 5: "career_goal", 6: "fallback",  # <--- use all of your intents
-        }
-
-        # The correct 'intent' value is stored here
+
+        # === Get intent & reply ===
         intent = id2label.get(pred_id, "fallback")
-
-        # === Get the reply ===
         reply = responses.get(intent, responses["fallback"])
-
-        return {"reply": str(reply), "intent": intent}
-
+
+        print(f"🧠 Input: {req.text} | Intent: {intent} | Reply: {reply}")
+
+        return {"reply": reply, "intent": intent}
+
     except Exception as e:
-        import traceback
-        print(f"❌ Runtime error in /chatbot: {e}")
         traceback.print_exc()
-        # Return the error with an 'intent' value that is guaranteed to be defined
-        return {"reply": "⚠️ Internal server error.", "intent": intent}  # <--- WILL NOT CRASH ANYMORE
+        return {"reply": "⚠️ Internal server error.", "intent": intent}