Khoi1234210 committed
Commit efbfdc1 · verified · 1 Parent(s): 6a748ed

Update app.py

Files changed (1)
  1. app.py +73 -132
app.py CHANGED
@@ -4,18 +4,19 @@ from huggingface_hub import InferenceClient
  from datasets import load_dataset
  import random
  import re
  # Load math datasets for sample problems
  fw = load_dataset("HuggingFaceFW/fineweb-edu", name="sample-10BT", split="train", streaming=True)
  ds = load_dataset("HuggingFaceH4/ultrachat_200k", streaming=True)
  def load_sample_problems():
  """Load sample problems from math datasets"""
  try:
- # Load GSM8K for sample problems
  gsm8k = load_dataset("openai/gsm8k", "main", streaming=True)
  samples = []
  for i, item in enumerate(gsm8k["train"]):
  samples.append(item["question"])
- if i >= 50: # Load 50 samples
  break
  return samples
  except:
@@ -27,7 +28,6 @@ def load_sample_problems():
  "Solve the system: x + 2y = 7, 3x - y = 4"
  ]

- # Initialize sample problems
  math_samples = load_sample_problems()

  def create_math_system_message():
@@ -55,41 +55,34 @@ def create_math_system_message():
  5. **Alternative Methods** - Mention other possible approaches

  Always be precise, educational, and encourage mathematical thinking."""
- def respond(
- message,
- history: list[dict[str, str]],
- system_message,
- max_tokens,
- temperature,
- top_p,
- ):
- def render_latex(text):
- # Wrap inline math properly
- text = re.sub(r'\$(.*?)\$', r'$$\1$$', text) # Display mode
  return text

- yield render_latex(response)
-
- # Show initial thinking message
  yield "🤔 Thinking for the suitable answer..."

- """
- Enhanced response function for mathematical problem solving
- """
- # Use Qwen Math model for better mathematical reasoning
- client = InferenceClient(
- model="Qwen/Qwen2.5-Math-7B-Instruct" # Specialized math model
- )
-
- # Build message history
  messages = [{"role": "system", "content": system_message}]
  messages.extend(history)
  messages.append({"role": "user", "content": message})
-
  response = ""
- # Add this line to ensure no truncation
- max_tokens = max(max_tokens, 1536) # Floor at 1536 for complex math
-
  try:
  for message_chunk in client.chat_completion(
  messages,
@@ -99,12 +92,19 @@ def respond(
  top_p=top_p,
  ):
  choices = message_chunk.choices
- token = ""
  if len(choices) and choices[0].delta.content:
  token = choices[0].delta.content
-
- response += token
- yield response

  except Exception as e:
  error_msg = f"❌ **Error**: {str(e)}\n\n💡 **Troubleshooting**:\n- Make sure you're logged in with Hugging Face\n- Check if the model is accessible\n- Try a simpler problem first"
@@ -116,17 +116,22 @@ def get_random_sample():
  return random.choice(math_samples)
  return "Solve for x: 2x² + 5x - 3 = 0"

- def set_difficulty_preset(difficulty):
- """Set temperature and other params based on difficulty"""
- presets = {
- "Elementary": (0.2, 0.8, 512), # Low temp for accuracy
- "High School": (0.3, 0.85, 768), # Balanced
- "College": (0.4, 0.9, 1024), # More exploration
- "Competition": (0.3, 0.8, 1536) # Detailed solutions
- }
- return presets.get(difficulty, (0.3, 0.85, 768))

- # Create the enhanced ChatInterface
  chatbot = gr.ChatInterface(
  respond,
  type="messages",
@@ -144,79 +149,40 @@ chatbot = gr.ChatInterface(
  lines=3,
  max_lines=10
  ),
- gr.Slider(
- minimum=256,
- maximum=2048,
- value=768,
- step=64,
- label="📝 Max Tokens (Solution Length)"
- ),
- gr.Slider(
- minimum=0.1,
- maximum=1.0,
- value=0.3,
- step=0.1,
- label="🎯 Temperature (Creativity vs Precision)"
- ),
- gr.Slider(
- minimum=0.1,
- maximum=1.0,
- value=0.85,
- step=0.05,
- label="🔍 Top-p (Response Diversity)",
- ),
  ],
  examples=[
  ["What is the derivative of f(x) = 3x² + 2x - 1?"],
  ["A triangle has sides of length 5, 12, and 13. What is its area?"],
- ["If log₂(x) + log₂(x+6) = 4, find the value of x."],
- ["A bag contains 5 red balls and 7 blue balls. What's the probability of drawing 2 red balls without replacement?"],
  ["Find the limit: lim(x→0) (sin(x)/x)"],
- ["Solve the system of equations: x + 2y = 7 and 3x - y = 4"],
- ["What is the integral of ∫(2x³ - 5x + 3)dx?"],
- ["In a right triangle, if one angle is 30° and the hypotenuse is 10, find the lengths of the other two sides."]
  ],
- cache_examples=False, # Don't cache for dynamic math problems
  concurrency_limit=10,
  )

- # Enhanced interface with additional features
  with gr.Blocks(
  title="🧮 Mathetics AI",
  theme=gr.themes.Soft(),
  css="""
- .math-highlight {
- background-color: #f0f8ff;
- padding: 10px;
- border-left: 4px solid #4CAF50;
- margin: 10px 0;
- border-radius: 5px;
- }
- .difficulty-selector {
- background-color: #fff3e0;
- padding: 15px;
- border-radius: 10px;
- margin: 10px 0;
- }
  """
  ) as demo:

- gr.Markdown("""
- # 🧮 **Mathetics AI** - Advanced Mathematics Solver

- **Your Personal AI Math Tutor** | Specialized in step-by-step problem solving across all mathematical domains
- """)
-
-
  with gr.Row():
  with gr.Column(scale=4):
- # Main chat interface
  chatbot.render()

  with gr.Column(scale=1):
- # Sidebar with additional features
  with gr.Accordion("🎲 **Quick Actions**", open=True):
-
  difficulty_preset = gr.Dropdown(
  choices=["Elementary", "High School", "College", "Competition"],
  value="High School",
@@ -224,54 +190,29 @@ with gr.Blocks(
  elem_classes=["difficulty-selector"]
  )

- sample_btn = gr.Button("🎯 Get Sample Problem", variant="secondary", size="sm")
- help_btn = gr.Button("❓ Math Help Tips", variant="secondary", size="sm")

- # Quick math tools
  gr.Markdown("### 🔧 **Quick Tools**")
- gr.Markdown("""
- - **Algebra**: Equations, inequalities, factoring
- - **Geometry**: Area, volume, trigonometry
- - **Calculus**: Derivatives, integrals, limits
- - **Statistics**: Probability, distributions
- - **Number Theory**: Prime factorization, GCD/LCM
- """)

- # Footer with information
  gr.Markdown("""
  ---
- **🔧 Technical Details:**
- - **Model**: Qwen/Qwen2.5-Math-7B-Instruct (Specialized for Mathematics)
- - **Features**: Real-time streaming responses, step-by-step solutions

- **💡 Usage Tips:**
- - Be specific about what you want to find or solve
- - Mention if you want step-by-step solutions
- - Ask for alternative solution methods
- - Request verification of your own solutions
  """)
-

- # Event handlers for additional functionality
- def insert_sample(difficulty):
- sample = get_random_sample()
- return sample
-
- def show_help():
- return """**Math Help Tips:**
-
- 1. **Be Specific**: "Find the derivative of..." instead of "Help with calculus"
- 2. **Show Your Work**: "I got x=5, is this correct?"
- 3. **Ask for Steps**: "Show me step-by-step how to solve..."
- 4. **Request Verification**: "Check my solution to this problem"
- 5. **Alternative Methods**: "What's another way to solve this?"
- """
-
- # Connect sample button (this would need to be integrated with the chat properly)
  sample_btn.click(
- lambda x: get_random_sample(),
  inputs=[difficulty_preset],
- outputs=[] # Would need proper integration with chat input
  )

  if __name__ == "__main__":
 
  from datasets import load_dataset
  import random
  import re
+
  # Load math datasets for sample problems
  fw = load_dataset("HuggingFaceFW/fineweb-edu", name="sample-10BT", split="train", streaming=True)
  ds = load_dataset("HuggingFaceH4/ultrachat_200k", streaming=True)
+
  def load_sample_problems():
  """Load sample problems from math datasets"""
  try:
  gsm8k = load_dataset("openai/gsm8k", "main", streaming=True)
  samples = []
  for i, item in enumerate(gsm8k["train"]):
  samples.append(item["question"])
+ if i >= 50:
  break
  return samples
  except:

  "Solve the system: x + 2y = 7, 3x - y = 4"
  ]

  math_samples = load_sample_problems()

  def create_math_system_message():
 
  5. **Alternative Methods** - Mention other possible approaches

  Always be precise, educational, and encourage mathematical thinking."""
+
+ def render_latex(text):
+ """Format math expressions for LaTeX rendering"""
+ if not text: # Safety check
+ return text
+ # Wrap inline math: x^2 → $x^2$
+ text = re.sub(r'\b([a-zA-Z]+)\^?(\d*)\b', r'$\1^{\2}$', text)
+ # Wrap functions: sin(x) → $\sin(x)$
+ text = re.sub(r'([a-zA-Z]+)\(([^)]+)\)', r'$\1(\2)$', text)
+ # Wrap limits: lim(x→0) → $\lim_{x\to0}$
+ text = re.sub(r'lim\(([a-z])→([0-9.]+)\)', r'$\lim_{ \1 \to \2}$', text)
  return text

+ def respond(message, history, system_message, max_tokens, temperature, top_p):
+ # Show initial thinking
  yield "🤔 Thinking for the suitable answer..."

+ # Use Qwen Math model
+ client = InferenceClient(model="Qwen/Qwen2.5-Math-7B-Instruct")
+
+ # Build messages
  messages = [{"role": "system", "content": system_message}]
  messages.extend(history)
  messages.append({"role": "user", "content": message})
+
  response = ""
+ max_tokens = max(max_tokens, 1536) # Ensure no truncation
+
  try:
  for message_chunk in client.chat_completion(
  messages,

  top_p=top_p,
  ):
  choices = message_chunk.choices
  if len(choices) and choices[0].delta.content:
  token = choices[0].delta.content
+ response += token
+ # Stream raw during generation, format at end
+ if "```" in response or len(response) > 500: # Format after substantial response
+ formatted = render_latex(response)
+ yield formatted
+ else:
+ yield response
+
+ # Final formatted response
+ final_formatted = render_latex(response)
+ yield final_formatted

  except Exception as e:
  error_msg = f"❌ **Error**: {str(e)}\n\n💡 **Troubleshooting**:\n- Make sure you're logged in with Hugging Face\n- Check if the model is accessible\n- Try a simpler problem first"
 
  return random.choice(math_samples)
  return "Solve for x: 2x² + 5x - 3 = 0"

+ def insert_sample_to_chat(difficulty):
+ """Insert random sample into chat input"""
+ sample = get_random_sample()
+ return "", [None, sample] # Clear input, add sample to chat

+ def show_help():
+ return """**Math Help Tips:**
+
+ 1. **Be Specific**: "Find the derivative of..." instead of "Help with calculus"
+ 2. **Show Your Work**: "I got x=5, is this correct?"
+ 3. **Ask for Steps**: "Show me step-by-step how to solve..."
+ 4. **Request Verification**: "Check my solution to this problem"
+ 5. **Alternative Methods**: "What's another way to solve this?"
+ """
+
+ # Create ChatInterface
  chatbot = gr.ChatInterface(
  respond,
  type="messages",
 
  lines=3,
  max_lines=10
  ),
+ gr.Slider(minimum=256, maximum=2048, value=768, step=64, label="📝 Max Tokens"),
+ gr.Slider(minimum=0.1, maximum=1.0, value=0.3, step=0.1, label="🎯 Temperature"),
+ gr.Slider(minimum=0.1, maximum=1.0, value=0.85, step=0.05, label="🔍 Top-p"),
  ],
  examples=[
  ["What is the derivative of f(x) = 3x² + 2x - 1?"],
  ["A triangle has sides of length 5, 12, and 13. What is its area?"],
  ["Find the limit: lim(x→0) (sin(x)/x)"],
+ ["Solve the system: x + 2y = 7, 3x - y = 4"],
+ ["What is the integral of ∫(2x³ - 5x + 3)dx?"]
  ],
+ cache_examples=False,
  concurrency_limit=10,
  )

+ # Main interface
  with gr.Blocks(
  title="🧮 Mathetics AI",
  theme=gr.themes.Soft(),
  css="""
+ .math-highlight { background-color: #f0f8ff; padding: 10px; border-left: 4px solid #4CAF50; margin: 10px 0; border-radius: 5px; }
+ .difficulty-selector { background-color: #fff3e0; padding: 15px; border-radius: 10px; margin: 10px 0; }
+ .markdown math { font-size: 1.1em; }
  """
  ) as demo:

+ gr.Markdown("# 🧮 **Mathematics AI** - Advanced Mathematics Solver\n**Your Personal AI Math Tutor**")

  with gr.Row():
  with gr.Column(scale=4):
  chatbot.render()

  with gr.Column(scale=1):
  with gr.Accordion("🎲 **Quick Actions**", open=True):
  difficulty_preset = gr.Dropdown(
  choices=["Elementary", "High School", "College", "Competition"],
  value="High School",
 
  elem_classes=["difficulty-selector"]
  )

+ sample_btn = gr.Button("🎯 Get Sample Problem", variant="secondary")
+ help_btn = gr.Button("❓ Math Help Tips", variant="secondary")

  gr.Markdown("### 🔧 **Quick Tools**")
+ gr.Markdown("- **Algebra**: Equations, inequalities, factoring\n- **Geometry**: Area, volume, trigonometry\n- **Calculus**: Derivatives, integrals, limits\n- **Statistics**: Probability, distributions\n- **Number Theory**: Prime factorization, GCD/LCM")

  gr.Markdown("""
  ---
+ **🔧 Technical Details:** Qwen/Qwen2.5-Math-7B-Instruct | Real-time streaming responses

+ **💡 Usage Tips:** Be specific • Request step-by-step • Ask for verification
  """)

+ # Event handlers
  sample_btn.click(
+ insert_sample_to_chat,
  inputs=[difficulty_preset],
+ outputs=[chatbot] # Actually feeds into chat now!
+ )
+
+ help_btn.click(
+ show_help,
+ outputs=gr.Markdown() # Could show in a separate output
  )

  if __name__ == "__main__":