Spaces: Runtime error

Commit: 99bd29e
Parent: b8c6e22 (initial commit)
gen.py CHANGED
@@ -3,7 +3,10 @@ import sys
 from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
 import json
 
-
+# Get the HF_TOKEN from the environment variable (set by the Space)
+hf_token = os.getenv("HF_TOKEN")
+
+tokenizer = AutoTokenizer.from_pretrained('google/gemma-2-2b-it', use_auth_token=hf_token)
 
 # Configure 4-bit quantization using BitsAndBytesConfig
 quantization_config = BitsAndBytesConfig(
@@ -17,10 +20,10 @@ model = AutoModelForCausalLM.from_pretrained(
     'google/gemma-2-2b-it',
     device_map="auto",
     quantization_config=quantization_config,
+    use_auth_token=hf_token
 )
 
 
-
 # Definir el prompt para generar un JSON con eventos anidados
 prompt = (
     "Genera un JSON que describa una serie de eventos consecutivos en un formato similar al siguiente:\n\n"