Khoi1234210 committed · verified
Commit 3aa12d6 · Parent: 22a8051

Fixing the issues with AI

Files changed (1): app.py (+30 -6)
app.py CHANGED
@@ -105,7 +105,7 @@ def try_sympy_compute(message):
 
     try:
         # Definite integrals
-        match = re.search(r'int(?:egral)?(?:\s+of)?\s+(.+?)\s+from\s+(.+?)\s+to\s+(.+?)(?:\s|$)', msg_lower)
+        match = re.search(r'\bint(?:egral)?(?:\s+of)?\s+(.+?)\s+from\s+(.+?)\s+to\s+(.+?)(?:\s|$)', msg_lower)
         if match:
             expr_str, lower, upper = match.groups()
             expr = parse_expr(expr_str.replace('^', '**'), transformations=transforms)
@@ -113,7 +113,7 @@ def try_sympy_compute(message):
             return sp.latex(result, mode='plain')
 
         # Derivatives
-        match = re.search(r'deriv(?:ative)?(?:\s+of)?\s+(.+?)(?:\s|$)', msg_lower)
+        match = re.search(r'\bderiv(?:ative)?(?:\s+of)?\s+(.+?)(?:\s|$)', msg_lower)
         if match:
             expr_str = match.group(1).strip()
             expr = parse_expr(expr_str.replace('^', '**'), transformations=transforms)
@@ -121,7 +121,7 @@ def try_sympy_compute(message):
             return sp.latex(result, inv_trig_style='power')
 
         # Limits
-        match = re.search(r'lim(?:it)?.*?(?:\()?(.+?)\s+(?:as\s+)?(?:x\s*)?(?:→|->|to)\s*(.+?)(?:\)|$)', msg_lower)
+        match = re.search(r'\blim(?:it)?.*?(?:\()?(.+?)\s+(?:as\s+)?(?:x\s*)?(?:→|->|to)\s*(.+?)(?:\)|$)', msg_lower)
         if match:
             expr_str, to_val = match.groups()
             expr = parse_expr(expr_str.replace('^', '**'), transformations=transforms)
@@ -129,7 +129,7 @@ def try_sympy_compute(message):
             return sp.latex(result)
 
         # Triangle area (Heron's formula)
-        match = re.search(r'(\d+)[,\s-]+(\d+)[,\s-]+(\d+)', message)
+        match = re.search(r'\b(?:triangle|area)\b.*?(\d+)[,\s-]+(\d+)[,\s-]+(\d+)', message)
        if match and 'triangle' in msg_lower:
            a, b, c = map(float, match.groups())
            s = (a + b + c) / 2
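The only functional change in the four hunks above is the added \b word boundary (plus the \b(?:triangle|area)\b prefix in the Heron's-formula branch), which keeps the keyword patterns from firing on substrings of unrelated words. A minimal sketch of the difference, using the integral pattern with hypothetical test strings; the OLD/NEW names below are chosen here purely for illustration:

import re

OLD = r'int(?:egral)?(?:\s+of)?\s+(.+?)\s+from\s+(.+?)\s+to\s+(.+?)(?:\s|$)'
NEW = r'\bint(?:egral)?(?:\s+of)?\s+(.+?)\s+from\s+(.+?)\s+to\s+(.+?)(?:\s|$)'

msg = "print x from 1 to 5"            # no integral requested; "int" only occurs inside "print"
print(re.search(OLD, msg))             # matches -> the old code would try to integrate "x" from 1 to 5
print(re.search(NEW, msg))             # None   -> \b rejects the mid-word match

msg2 = "integral of x^2 from 0 to 1"
print(re.search(NEW, msg2).groups())   # ('x^2', '0', '1') -> genuine requests still parse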
@@ -143,11 +143,32 @@ def try_sympy_compute(message):
 
 def respond(message, history, system_message, max_tokens, temperature, top_p):
     """Streaming response with error handling"""
-    client = InferenceClient(model="Qwen/Qwen2.5-Math-72B-Instruct")
+    # Get HF token from environment
+    hf_token = os.getenv("HF_TOKEN")
+    if not hf_token:
+        yield "❌ **Error:** HF_TOKEN not found. Set it in your environment or Hugging Face Spaces secrets."
+        return
+
+    client = InferenceClient(
+        model="Qwen/Qwen2.5-Math-72B-Instruct",
+        token=hf_token
+    )
 
     messages = [{"role": "system", "content": system_message}]
+
+    # Safely handle history format
     for msg in history:
-        messages.append({"role": msg["role"], "content": msg["content"]})
+        if isinstance(msg, dict):
+            role = msg.get("role", "user")
+            content = msg.get("content", "")
+        else:
+            # Fallback for unexpected formats
+            role = "user"
+            content = str(msg)
+
+        if content:  # Only add non-empty messages
+            messages.append({"role": role, "content": content})
+
     messages.append({"role": "user", "content": message})
 
     try:
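Two behaviours change in respond(): the call now fails fast with a readable error when HF_TOKEN is not set (the token is read with os.getenv and passed to InferenceClient via its token argument; on a Hugging Face Space it would typically be supplied as a repository secret named HF_TOKEN), and the history loop no longer assumes every entry is a {"role", "content"} dict. A standalone sketch of that normalization logic, run against made-up history entries; the sample strings and system prompt below are illustrative only:

# Copy of the normalization added above, applied to a mixed-format history.
history = [
    {"role": "user", "content": "what is 2+2?"},
    {"role": "assistant", "content": "4"},
    ("old tuple-style entry",),              # hypothetical malformed entry
    {"role": "assistant", "content": ""},    # empty content -> skipped
]

messages = [{"role": "system", "content": "You are a math tutor."}]
for msg in history:
    if isinstance(msg, dict):
        role = msg.get("role", "user")
        content = msg.get("content", "")
    else:
        # Fallback for unexpected formats
        role = "user"
        content = str(msg)
    if content:  # only add non-empty messages
        messages.append({"role": role, "content": content})

for m in messages:
    print(m["role"], "->", m["content"])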
@@ -240,6 +261,9 @@ with gr.Blocks(title="🧮 Mathetics AI", theme=gr.themes.Soft(), css="""
 
     def chat_response(message, history, sys_msg, max_tok, temp, top_p):
         """Handle chat with streaming"""
+        if not message.strip():
+            return history, ""
+
         history.append({"role": "user", "content": message})
 
         # Create assistant message slot
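The guard added to chat_response() returns (history, "") for blank input, which presumably maps to the handler's two outputs (unchanged chat history, cleared textbox), so whitespace-only submissions never reach the model. A stripped-down, hypothetical version of just that guard:

def chat_response_stub(message, history):
    # Hypothetical reduction of the handler above to the new guard only.
    if not message.strip():
        return history, ""           # leave history alone, clear the input box
    history.append({"role": "user", "content": message})
    return history, ""

print(chat_response_stub("   ", []))    # ([], '') -- nothing appended
print(chat_response_stub("2 + 2", []))  # ([{'role': 'user', 'content': '2 + 2'}], '')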
 