app.py CHANGED
@@ -14,6 +14,7 @@ MOCK = False
 TEST_FOLDER = "c4f5"
 
 MODEL_NAME="xu3kev/deepseekcoder-7b-logo-pbe"
+MODEL_NAME="microsoft/Phi-3-small-8k-instruct"
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 hug_model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, torch_dtype=torch.float16,).to('cuda')
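The first hunk leaves the DeepSeek assignment in place and adds a second MODEL_NAME line directly below it, so the Phi-3 value is the one in effect when the checkpoint is loaded a few lines later. A minimal sketch of that loading step, assuming hug_tokenizer is created the same way elsewhere in app.py; the trust_remote_code flag is an assumption (the Phi-3-small checkpoints ship custom modeling code and may not load without it):

    import torch
    from transformers import AutoModelForCausalLM, AutoTokenizer

    MODEL_NAME = "microsoft/Phi-3-small-8k-instruct"  # overrides the DeepSeek value above

    # trust_remote_code=True is an assumption, not part of this commit.
    hug_tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, trust_remote_code=True)
    hug_model = AutoModelForCausalLM.from_pretrained(
        MODEL_NAME,
        torch_dtype=torch.float16,
        trust_remote_code=True,
    ).to('cuda')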
@@ -222,7 +223,7 @@ def llm_call(question_prompt, model_name,
              top_p=1, n_samples=64, stop=None):
     if HUGGINGFACE:
         model_inputs = hug_tokenizer([question_prompt], return_tensors="pt").to('cuda')
-        generated_ids = hug_model.generate(**model_inputs, max_length=1400, temperature=1, num_return_sequences=
+        generated_ids = hug_model.generate(**model_inputs, max_length=1400, temperature=1, num_return_sequences=1, do_sample=True)
         responses = hug_tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
         codes = []
         for response in responses:
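In the second hunk the previous generate() call (its tail is truncated in this view) is replaced by one that draws a single sample per prompt: num_return_sequences=1 with do_sample=True, even though llm_call still accepts n_samples=64. For reference, a sketch of how the same call can return several samples at once, assuming standard transformers generate()/batch_decode() behaviour; apart from hug_model, hug_tokenizer and question_prompt, the names here are hypothetical:

    # Not the code in this commit; an illustrative multi-sample variant.
    model_inputs = hug_tokenizer([question_prompt], return_tensors="pt").to('cuda')
    generated_ids = hug_model.generate(
        **model_inputs,
        max_length=1400,         # prompt + completion length, in tokens
        do_sample=True,
        temperature=1.0,
        num_return_sequences=4,  # e.g. n_samples, limited by GPU memory
    )
    responses = hug_tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
    # Decoded strings generally start with the prompt, so strip it to keep the completion.
    completions = [r[len(question_prompt):] for r in responses]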
@@ -420,18 +421,18 @@ def main():
         output_image = gr.Image(label="output")
 
         submit_button.click(img_to_code_img, inputs=canvas, outputs=output_image)
-
-
-
-
-
-
-
-
-
-
-
-
+        # demo.load(
+        #     None,
+        #     None,
+        #     js="""
+        #     () => {
+        #         const params = new URLSearchParams(window.location.search);
+        #         if (!params.has('__theme')) {
+        #             params.set('__theme', 'light');
+        #             window.location.search = params.toString();
+        #         }
+        #     }""",
+        #     )
 
     demo.launch(share=True)
 
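The last hunk swaps what appear to be twelve blank lines for a commented-out demo.load(...) call, so the committed behaviour is unchanged; if uncommented, the JavaScript would reload the page with a __theme=light query parameter and request Gradio's light theme. A minimal sketch of that snippet enabled in a standalone Blocks app; the demo.load arguments are taken from the commented code, while the small app around it is hypothetical:

    import gradio as gr

    with gr.Blocks() as demo:
        gr.Markdown("theme demo")
        # Runs once on page load; adds ?__theme=light if no theme is set.
        demo.load(
            None,
            None,
            js="""
            () => {
                const params = new URLSearchParams(window.location.search);
                if (!params.has('__theme')) {
                    params.set('__theme', 'light');
                    window.location.search = params.toString();
                }
            }""",
        )

    demo.launch(share=True)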