Commit 1162de7 · Parent(s): 6e36201 · relax token reqs
app.py CHANGED

@@ -131,7 +131,7 @@ def sliding_window_prediction(template, text, model, tokenizer, window_size=4000
 
 # Load the model and tokenizer
 model_name = "numind/NuExtract-v1.5"
-auth_token = os.environ.get("HF_TOKEN") or
+auth_token = os.environ.get("HF_TOKEN") or False
 model = AutoModelForCausalLM.from_pretrained(model_name,
                                              trust_remote_code=True,
                                              torch_dtype=torch.bfloat16,
@@ -152,7 +152,7 @@ def gradio_interface_function(template, text, is_example):
     # yield gr.update(value=chunk_info), gr.update(value=progress), gr.update(value=full_pred), gr.update(value=html_content)
     yield progress, full_pred, html_content
 
-    if not is_example:
+    if not is_example and os.environ["LOG_SERVER"] is not None:
         log_event(text, template, full_pred)
 
 
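The first change is what the commit message describes: when the HF_TOKEN secret is absent, auth_token now falls back to False, and the Hugging Face hub treats token=False as "send no token", so a public model like numind/NuExtract-v1.5 still downloads without a configured secret. A minimal sketch of that pattern, assuming the Space forwards auth_token through the `token` keyword (that keyword sits outside the lines shown in the hunk):

```python
import os

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "numind/NuExtract-v1.5"

# os.environ.get() returns None when HF_TOKEN is unset (and an empty
# string is falsy too), so auth_token becomes False in both cases.
# token=False tells the hub client to authenticate with no token at all,
# which is sufficient for a public repository.
auth_token = os.environ.get("HF_TOKEN") or False

# Assumption: the Space passes auth_token via the `token` keyword; that
# argument is not visible in the diff above.
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,
    token=auth_token,
)
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True, token=auth_token)
```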
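The second change gates request logging behind a LOG_SERVER environment variable. One caveat: os.environ["LOG_SERVER"] raises KeyError when the variable is not set and never evaluates to None, so the committed condition only behaves as a guard on Spaces where LOG_SERVER is always defined. A short sketch of the same intent written with os.environ.get(), using a hypothetical stand-in for log_event (its body is not part of this diff):

```python
import os

def log_event(text, template, full_pred):
    # Hypothetical placeholder for the Space's log_event helper, whose
    # real body is not shown in this commit; it only prints here so the
    # sketch stays self-contained.
    print(f"logged prediction of {len(full_pred)} chars")

def maybe_log_event(text, template, full_pred, is_example):
    # .get() returns None when LOG_SERVER is missing instead of raising
    # KeyError, so logging is skipped both for the built-in examples and
    # when no log server has been configured.
    if not is_example and os.environ.get("LOG_SERVER") is not None:
        log_event(text, template, full_pred)

# Example call mirroring the end of gradio_interface_function:
maybe_log_event("some input text", "{}", '{"name": ""}', is_example=False)
```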