Update app.py
app.py CHANGED
@@ -3,6 +3,18 @@ from huggingface_hub import InferenceClient
 from datasets import load_dataset
 import random
 import re
+import requests
+CLOUDFLARE_GATEWAY = "https://gateway.ai.cloudflare.com/v1/0db1612c8a7b7fe4af5459f6a1623c6a/looptunnel/workers-ai/@cf/meta/llama-3.1-8b-instruct"
+CF_TOKEN = "0QEwvGYXH_vbbQo2-fhyKdtqt7a9mGFoFSzpo_JJ"
+
+def ask_ai(prompt):
+    headers = {
+        "Authorization": f"Bearer {CF_TOKEN}",
+        "Content-Type": "application/json"
+    }
+    data = {"prompt": prompt}
+    r = requests.post(CLOUDFLARE_GATEWAY, headers=headers, json=data)
+    return r.json()
 
 # Global datasets - load lazily
 math_samples = None