Update app.py
app.py CHANGED
@@ -68,6 +68,8 @@ login(token=("hf_" + HF_TOKEN))
 
 system_propmpt = system_sr
 
+client = InferenceClient(LLM_NAME)
+
 # "facebook/blenderbot-400M-distill", facebook/blenderbot-400M-distill, stabilityai/stablelm-zephyr-3b, BAAI/bge-small-en-v1.5
 Settings.llm = HuggingFaceInferenceAPI(model_name=LLM_NAME,
                                        device_map="auto",
@@ -80,7 +82,8 @@ Settings.llm = HuggingFaceInferenceAPI(model_name=LLM_NAME,
                                        "top_k": top_k, "do_sample": False },
                                        # tokenizer_kwargs={"max_length": 4096},
                                        tokenizer_name=LLM_NAME,
-
+                                       hf_token = "hf_" + HF_TOKEN,
+                                       # api_key="zpka_45536f339a3d403e997bac9158f9ebd2_49d2a58e",
                                        # provider="auto",
                                        # task="None"
                                        )
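
For context, the first hunk adds a huggingface_hub client (client = InferenceClient(LLM_NAME)) next to the existing llama_index Settings.llm configuration. Below is a minimal sketch of how such a client could be exercised, assuming LLM_NAME and HF_TOKEN are defined earlier in app.py as the hunk headers suggest; the model name and prompt are illustrative placeholders, not values taken from the file.

from huggingface_hub import InferenceClient

# Assumed to exist earlier in app.py; the hunk header shows login(token=("hf_" + HF_TOKEN)),
# so HF_TOKEN appears to be stored without the "hf_" prefix.
LLM_NAME = "stabilityai/stablelm-zephyr-3b"   # illustrative; one of the models named in the comment
HF_TOKEN = "..."                              # secret read from the Space; value elided

# Same construction as the line added in the first hunk; a token can also be
# passed explicitly, e.g. InferenceClient(LLM_NAME, token="hf_" + HF_TOKEN).
client = InferenceClient(LLM_NAME)

# text_generation() is part of the huggingface_hub InferenceClient API and
# returns the generated continuation as a string.
reply = client.text_generation("Hello, how are you?", max_new_tokens=64)
print(reply)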