Spaces:
Runtime error
Runtime error
Changed model to Writer/Palmyra-Fin-70B-32K
Browse files
app.py
CHANGED
|
@@ -13,12 +13,14 @@ theme = gr.themes.Base(
|
|
| 13 |
font=[gr.themes.GoogleFont('Libre Franklin'), gr.themes.GoogleFont('Public Sans'), 'system-ui', 'sans-serif'],
|
| 14 |
)
|
| 15 |
|
| 16 |
-
tokenizer = AutoTokenizer.from_pretrained("yam-peleg/Experiment26-7B", trust_remote_code=True)
|
| 17 |
-
model = AutoModelForCausalLM.from_pretrained(
|
| 18 |
-
    "yam-peleg/Experiment26-7B",
|
| 19 |
-
    torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
|
| 20 |
-
    trust_remote_code=True,
|
| 21 |
-
).to(device)
|
|
|
|
|
|
|
| 22 |
@spaces.GPU(enable_queue=True)
|
| 23 |
def generate_text(text, temperature, maxLen):
|
| 24 |
text = text.lstrip().lstrip('<s>').lstrip()
|
|
|
|
| 13 |
font=[gr.themes.GoogleFont('Libre Franklin'), gr.themes.GoogleFont('Public Sans'), 'system-ui', 'sans-serif'],
|
| 14 |
)
|
| 15 |
|
| 16 |
+
tokenizer = AutoTokenizer.from_pretrained("Writer/Palmyra-Fin-70B-32K")
|
| 17 |
+
model = AutoModelForCausalLM.from_pretrained("Writer/Palmyra-Fin-70B-32K")
|
| 18 |
+
#tokenizer = AutoTokenizer.from_pretrained("yam-peleg/Experiment26-7B", trust_remote_code=True)
|
| 19 |
+
#model = AutoModelForCausalLM.from_pretrained(
|
| 20 |
+
# "yam-peleg/Experiment26-7B",
|
| 21 |
+
# torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
|
| 22 |
+
# trust_remote_code=True,
|
| 23 |
+
#).to(device)
|
| 24 |
@spaces.GPU(enable_queue=True)
|
| 25 |
def generate_text(text, temperature, maxLen):
|
| 26 |
text = text.lstrip().lstrip('<s>').lstrip()
|