Update app.py
app.py
CHANGED
@@ -3,15 +3,13 @@ import os
 import subprocess
 import random
 import string
-from huggingface_hub import cached_download, hf_hub_url
+from huggingface_hub import cached_download, hf_hub_url
 from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
 import black
 import pylint
 from transformers import AutoModelForSequenceClassification, AutoTokenizer
 from transformers import pipeline
 from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
-from huggingface_hub import hf_hub_token
-print(hf_hub_token())

 # Define functions for each feature

@@ -255,10 +253,10 @@ if st.button("Launch Chat App"):
     cwd = os.getcwd()

     # User Authentication
-
-
-
-
+    hf_token = st.text_input("Enter your Hugging Face Token:")
+    if hf_token:
+        # Set the token using HfFolder
+        HfFolder.save_token(hf_token)

     # Construct the command to launch the chat app
     command = f"cd projects/{project_name} && streamlit run chat_app.py"
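In the second hunk, the blank placeholder lines under # User Authentication are replaced with an interactive prompt: the user pastes a Hugging Face token into a Streamlit text input and it is persisted with HfFolder.save_token. The first hunk drops the old from huggingface_hub import hf_hub_token line and the print(hf_hub_token()) call, presumably because hf_hub_token is not a function exposed by huggingface_hub (a saved token is read back with HfFolder.get_token()). The diff does not add an import for HfFolder, so the snippet below is only a minimal, self-contained sketch of how the new block is assumed to run, with that import added and the input masked (type="password" is an optional hardening that is not in the diff):

import streamlit as st
from huggingface_hub import HfFolder  # not added by the diff; assumed to be imported elsewhere in app.py

# User Authentication
hf_token = st.text_input("Enter your Hugging Face Token:", type="password")
if hf_token:
    # Set the token using HfFolder: write it to the local Hugging Face token file
    # so later hub / transformers calls made by the app can authenticate with it.
    HfFolder.save_token(hf_token)
    st.success("Token saved.")

Separately, newer huggingface_hub releases have deprecated cached_download in favor of hf_hub_download and prefer login(token=...) over HfFolder.save_token, so the cached_download / hf_hub_url import may also need updating if the Space's dependencies are upgraded.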