Update src/translate/Translate.py
src/translate/Translate.py
CHANGED
@@ -80,7 +80,7 @@ def gemma_direct(requestValue: str, model: str = 'Gargaz/gemma-2b-romanian-bette

     tokenizer = AutoTokenizer.from_pretrained("Gargaz/gemma-2b-romanian-better")
     model = AutoModelForCausalLM.from_pretrained("Gargaz/gemma-2b-romanian-better").to(device)
-    input_ids = tokenizer.encode(
+    input_ids = tokenizer.encode(requestValue, add_special_tokens=True)
     num_tokens = len(input_ids)
     # Estimate output length (e.g., 50% longer)
     max_new_tokens = int(num_tokens * 1.5)
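For context, here is a minimal sketch of how the patched lines might sit inside gemma_direct. Only the tokenizer/model loading, the fixed encode call, and the max_new_tokens estimate come from the diff above; the device definition, the torch import, and the generate/decode steps are assumptions added for illustration.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

def gemma_direct(requestValue: str, model: str = 'Gargaz/gemma-2b-romanian-better'):
    # 'device' is referenced in the hunk but not defined there; assumed to look like this
    device = "cuda" if torch.cuda.is_available() else "cpu"

    tokenizer = AutoTokenizer.from_pretrained("Gargaz/gemma-2b-romanian-better")
    # Note: this assignment shadows the 'model' parameter, as in the diff
    model = AutoModelForCausalLM.from_pretrained("Gargaz/gemma-2b-romanian-better").to(device)

    # Patched line: encode the request text (with special tokens) instead of the incomplete call
    input_ids = tokenizer.encode(requestValue, add_special_tokens=True)
    num_tokens = len(input_ids)
    # Estimate output length (e.g., 50% longer than the input)
    max_new_tokens = int(num_tokens * 1.5)

    # Assumed continuation (not part of the diff): generate with the estimated budget and decode
    inputs = torch.tensor([input_ids]).to(device)
    with torch.no_grad():
        output_ids = model.generate(inputs, max_new_tokens=max_new_tokens)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)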