Update app.py
app.py
CHANGED
@@ -39,6 +39,17 @@ model_name = "daekeun-ml/Llama-2-ko-instruct-13B"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 model = AutoDistributedModelForCausalLM.from_pretrained(model_name)
 
+
+def check(model_name):
+    data = requests.get("https://health.petals.dev/api/v1/state").json()
+    out = []
+    for d in data['model_reports']:
+        if d['name'] == model_name:
+            if d['state']=="healthy":
+                return True
+    return False
+
+
 def chat(id, npc, prompt):
 
     # get_coin endpoint
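The new check() helper polls the public Petals health monitor and returns True only when the requested model is reported as healthy (the out list it builds is never used). Below is a minimal standalone sketch of the same lookup, assuming the endpoint URL and the model_reports / name / state fields exactly as they appear in the hunk above; the function name, timeout, and example usage are illustrative, not part of the commit.

import requests

PETALS_HEALTH_URL = "https://health.petals.dev/api/v1/state"  # same endpoint as in the diff

def is_model_healthy(model_name):
    # The monitor returns JSON with a 'model_reports' list; each entry
    # carries the served model's 'name' and an aggregate 'state'.
    data = requests.get(PETALS_HEALTH_URL, timeout=10).json()
    for report in data.get("model_reports", []):
        if report.get("name") == model_name and report.get("state") == "healthy":
            return True
    return False

if __name__ == "__main__":
    print(is_model_healthy("daekeun-ml/Llama-2-ko-instruct-13B"))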
@@ -52,13 +63,13 @@ def chat(id, npc, prompt):
         return "no coin"
 
     # model inference
-
-
-
-
-
-
-
+    if check:
+        prom = ""
+        inputs = tokenizer(prom, return_tensors="pt")["input_ids"]
+        outputs = model.generate(inputs, max_new_tokens=100)
+        print(tokenizer.decode(outputs[0]))
+    else:
+        output = "no model"
 
 
     # add_transaction endpoint
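A note on the inference hunk: as committed, `if check:` tests the function object itself, which is always truthy, so the else branch cannot be reached; the condition was presumably meant to be `check(model_name)`. The decoded text is also printed rather than stored in the `output` variable that the else branch assigns. The following is a hedged sketch of the apparent intent, keeping the names from the diff; the empty prompt string is left as the placeholder it is in the commit.

# Presumed intent of the inference block inside chat() (a sketch, not the committed code).
# Assumes the module-level tokenizer, model, check() and model_name from the hunks above.
if check(model_name):                              # actually call the health check; a bare `check` is always truthy
    prom = ""                                      # placeholder prompt, empty in the commit
    inputs = tokenizer(prom, return_tensors="pt")["input_ids"]
    outputs = model.generate(inputs, max_new_tokens=100)
    output = tokenizer.decode(outputs[0])          # keep the text so later code can return it
else:
    output = "no model"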