Famazo committed on
Commit
37ebc59
·
1 Parent(s): 797b1ae

Update backend/api.py

Browse files
Files changed (1) hide show
  1. backend/api.py +42 -60
backend/api.py CHANGED
@@ -3,58 +3,62 @@ from fastapi.middleware.cors import CORSMiddleware
3
  from pydantic import BaseModel
4
  from transformers import AutoTokenizer
5
  import onnxruntime as ort
6
- import numpy as np
7
  from pathlib import Path
8
  import traceback
9
- # import pandas as pd # Dihapus karena tidak dipakai
10
 
 
11
  app = FastAPI()
12
 
13
- # === CORS untuk frontend di Vercel ===
14
  app.add_middleware(
15
  CORSMiddleware,
16
- allow_origins=["*"],
17
  allow_credentials=True,
18
  allow_methods=["*"],
19
  allow_headers=["*"],
20
  )
21
 
22
- # === Path setup ===
23
  BASE_DIR = Path(__file__).resolve().parent
24
  MODEL_PATH = BASE_DIR / "models" / "bert_chatbot.onnx"
25
- TOKENIZER_PATH = BASE_DIR / "models" / "bert-base-multilingual-cased"
26
- # DATASET_PATH tidak diperlukan lagi
27
 
28
- # === Global Variables ===
29
  tokenizer = None
30
  session = None
31
 
32
- # === Load tokenizer dan model ===
33
  try:
34
- print("πŸš€ Loading ONNX model...")
35
-
36
  tokenizer = AutoTokenizer.from_pretrained(str(TOKENIZER_PATH))
37
  session = ort.InferenceSession(str(MODEL_PATH), providers=["CPUExecutionProvider"])
38
-
39
- print("βœ… ONNX model loaded!")
40
-
41
  except Exception as e:
42
- # Log ini akan muncul jika ada masalah Path/File saat startup
43
- print("--------------------------------------------------")
44
- print(f"❌ FATAL ERROR SAAT MEMUAT ONNX/SUMBER DAYA: {e}")
45
  traceback.print_exc()
46
- print("--------------------------------------------------")
47
- pass
48
 
49
- # === Default responses ===
 
 
 
 
 
 
 
 
 
 
 
 
 
50
  responses = {
51
- # ⭐ PERBAIKAN INI MEMASTIKAN JAWABAN GREETING BENAR ⭐
52
  "about_me": "I am a passionate developer specializing in AI and web development.",
53
  "skills": "My main skills are HTML5, CSS3, JavaScript, Laravel, Node.js, Database, TensorFlow, PyTorch, Firebase, and Jupyter Notebook.",
54
  "projects": "Some of my projects are Mobile Apps Bald Detection and Jupyter Notebook Bald Detection.",
55
  "experience": "I have worked as IT Support, AI Engineer, and Freelancer on multiple projects.",
56
  "career_goal": "My career goal is to become a Full Stack Developer and Machine Learning Engineer.",
57
- "greeting": "Hello! How can I help you regarding this portfolio?", # JAWABAN GREETING YANG BENAR
58
  "fallback": "I'm sorry, I don't understand. Please ask another question."
59
  }
60
 
@@ -64,57 +68,35 @@ class ChatRequest(BaseModel):
64
 
65
  @app.get("/")
66
  async def root():
67
- return {"message": "πŸš€ ONNX Chatbot API running on Hugging Face"}
68
 
69
  @app.post("/chatbot")
70
  async def chatbot(req: ChatRequest):
71
- # Deklarasikan intent sebagai fallback untuk mencegah NameError di blok except
72
- intent = "fallback"
73
-
74
  if session is None:
75
  return {"reply": responses["fallback"], "intent": "error_loading"}
76
-
77
  try:
78
- # 1. Tokenisasi (return_tensors="np")
79
  inputs = tokenizer(req.text, return_tensors="np", padding=True, truncation=True, max_length=128)
80
-
81
- # 2. Perkuat Tipe Data untuk ONNX
82
- input_names = [i.name for i in session.get_inputs()]
83
- ort_inputs = {}
84
 
85
- for name in input_names:
86
- if name in inputs:
87
- ort_inputs[name] = inputs[name].astype(np.int64)
88
 
89
- # 3. Inferensi ONNX
90
  ort_outputs = session.run(None, ort_inputs)
91
-
92
- # 4. Ambil Logit dan Prediksi
93
  logits = ort_outputs[0]
94
  pred_id = np.argmax(logits, axis=1)[0]
95
-
96
- # === Mapping ID ke label ===
97
- id2label = {
98
- 0: "about_me",
99
- 1: "career_goal",
100
- 2: "experience",
101
- 3: "fallback",
102
- 4: "greeting",
103
- 5: "projects",
104
- 6: "skills",
105
- }
106
-
107
- # Nilai 'intent' yang benar disimpan di sini
108
  intent = id2label.get(pred_id, "fallback")
109
-
110
- # === Ambil jawaban ===
111
- # Respon diambil dari dictionary responses global
112
  reply = responses.get(intent, responses["fallback"])
113
-
114
- return {"reply": str(reply), "intent": intent}
115
-
 
 
116
  except Exception as e:
117
- import traceback
118
- print(f"❌ Runtime error in /chatbot: {e}")
119
  traceback.print_exc()
120
- return {"reply": "⚠️ Internal server error.", "intent": intent}
 
3
  from pydantic import BaseModel
4
  from transformers import AutoTokenizer
5
  import onnxruntime as ort
6
+ import numpy as np
7
  from pathlib import Path
8
  import traceback
 
9
 
10
# === FastAPI application ===
app = FastAPI()

# === CORS so the frontend (e.g. hosted on Vercel) can call this API ===
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# broader than the CORS spec intends — consider pinning the real frontend
# origin before going to production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # tighten to the actual frontend origin when known
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
21
 
22
# === Filesystem layout: model artifacts live under backend/models/ ===
BASE_DIR = Path(__file__).resolve().parent
MODEL_PATH = BASE_DIR / "models" / "bert_chatbot.onnx"
TOKENIZER_PATH = BASE_DIR / "models" / "bert-base-multilingual-cased"
 
26
 
27
# === Globals populated at startup; they stay None when loading fails ===
tokenizer = None
session = None

# === Load tokenizer and ONNX inference session once, at import time ===
try:
    print("🚀 Loading tokenizer dan ONNX model...")
    tokenizer = AutoTokenizer.from_pretrained(str(TOKENIZER_PATH))
    session = ort.InferenceSession(str(MODEL_PATH), providers=["CPUExecutionProvider"])
    print("✅ Model dan tokenizer berhasil dimuat!")
except Exception as e:
    # Keep the module importable even if the artifacts are missing or broken;
    # the /chatbot endpoint answers with "error_loading" instead of crashing.
    print("❌ ERROR saat memuat model/tokenizer:", e)
    traceback.print_exc()
 
 
40
 
41
# === Intent labels in training order (tuple index == model class id) ===
# MUST stay in sync with the label order used when the classifier was trained.
_LABELS = (
    "about_me",
    "career_goal",
    "experience",
    "fallback",
    "greeting",
    "projects",
    "skills",
)

# Forward map: class id -> label name.
id2label = dict(enumerate(_LABELS))

# Reverse map: label name -> class id.
label2id = {name: idx for idx, name in enumerate(_LABELS)}
53
+
54
# === Canned replies, keyed by intent label (same labels as training) ===
responses = dict(
    about_me="I am a passionate developer specializing in AI and web development.",
    skills="My main skills are HTML5, CSS3, JavaScript, Laravel, Node.js, Database, TensorFlow, PyTorch, Firebase, and Jupyter Notebook.",
    projects="Some of my projects are Mobile Apps Bald Detection and Jupyter Notebook Bald Detection.",
    experience="I have worked as IT Support, AI Engineer, and Freelancer on multiple projects.",
    career_goal="My career goal is to become a Full Stack Developer and Machine Learning Engineer.",
    greeting="Hello! How can I help you regarding this portfolio?",
    fallback="I'm sorry, I don't understand. Please ask another question.",
)
64
 
 
68
 
69
@app.get("/")
async def root():
    """Health-check endpoint: confirms the API process is alive."""
    status_message = "🚀 Chatbot API (ONNX) running on Hugging Face"
    return {"message": status_message}
72
 
73
@app.post("/chatbot")
async def chatbot(req: ChatRequest):
    """Classify the user's message into an intent and return the canned reply.

    Returns a dict with:
      reply  -- the response text to show the user
      intent -- the predicted intent label; "error_loading" when the model
                never loaded, the current best guess (or "fallback") on errors
    """
    # Pre-declare the intent so the except-branch can never hit a NameError.
    intent = "fallback"

    # Model and tokenizer are loaded once at startup; if either failed,
    # degrade gracefully instead of crashing the request.
    if session is None or tokenizer is None:
        return {"reply": responses["fallback"], "intent": "error_loading"}

    try:
        # Tokenize straight to NumPy arrays for ONNX Runtime.
        inputs = tokenizer(req.text, return_tensors="np", padding=True, truncation=True, max_length=128)

        # Feed only the inputs the ONNX graph actually declares (the tokenizer
        # may emit extras such as token_type_ids that the exported model does
        # not accept, which would make session.run raise), and force int64 as
        # the graph expects.
        expected_inputs = {i.name for i in session.get_inputs()}
        ort_inputs = {
            name: arr.astype(np.int64)
            for name, arr in inputs.items()
            if name in expected_inputs
        }

        # Single forward pass; the first output holds the classification logits.
        logits = session.run(None, ort_inputs)[0]

        # int(...) turns the NumPy scalar into a plain int for the dict lookup.
        pred_id = int(np.argmax(logits, axis=1)[0])

        # Map class id -> intent label -> canned reply, falling back safely.
        intent = id2label.get(pred_id, "fallback")
        reply = responses.get(intent, responses["fallback"])

        print(f"🧠 Input: {req.text} | Intent: {intent} | Reply: {reply}")

        return {"reply": reply, "intent": intent}

    except Exception as e:
        # Log the full traceback server-side but keep the API contract stable.
        print(f"❌ Runtime error in /chatbot: {e}")
        traceback.print_exc()
        return {"reply": "⚠️ Internal server error.", "intent": intent}