Spaces: Running on Zero
Commit · 5814ab0
Parent(s): 6bf616b

Updated package versions in requirements files
Files changed:
- requirements.txt +1 -2
- requirements_aws.txt +13 -11
- requirements_gpu.txt +14 -15
requirements.txt CHANGED

@@ -1,4 +1,3 @@
-#hdbscan==0.8.40
 pandas==2.3.3
 plotly==6.3.1
 scikit-learn==1.7.2
@@ -22,6 +21,6 @@ torch==2.6.0 --extra-index-url https://download.pytorch.org/whl/cu124
 #https://github.com/abetlen/llama-cpp-python/releases/download/v0.3.2/llama_cpp_python-0.3.2-cp311-cp311-win_amd64.whl # Exact wheel specified for windows
 #llama-cpp-python==0.3.2 --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu
 # Specify exact llama_cpp wheel for huggingface compatibility
-
+https://github.com/abetlen/llama-cpp-python/releases/download/v0.3.4-cu121/llama_cpp_python-0.3.4-cp310-cp310-linux_x86_64.whl
 spaces==0.42.1
 numpy==2.2.6
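The functional change in requirements.txt is the exact prebuilt cu121 Linux wheel for llama-cpp-python, added under the "huggingface compatibility" comment so the package does not need to be compiled from source in the Space. As a rough, illustrative sketch (not code from this repo) of how the two pins above, `spaces` and `llama-cpp-python`, are typically combined in a ZeroGPU Space; the GGUF path, context size and generation settings are placeholder assumptions:

```python
# Illustrative sketch only (not from this repo): using the pinned `spaces` and
# llama-cpp-python packages together in a ZeroGPU Space.
import spaces
from llama_cpp import Llama

@spaces.GPU  # requests a ZeroGPU slot for the duration of the call
def generate(prompt: str) -> str:
    # Hypothetical local GGUF file; n_gpu_layers=-1 offloads all layers to the GPU.
    llm = Llama(model_path="models/example-model.gguf", n_gpu_layers=-1, n_ctx=2048)
    out = llm(prompt, max_tokens=64)
    return out["choices"][0]["text"]
```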
requirements_aws.txt CHANGED

@@ -1,12 +1,11 @@
-
-
-
-
-
-
-spacy==3.8.0
+pandas==2.3.3
+plotly==6.3.1
+scikit-learn==1.7.2
+umap-learn==0.5.9.post2
+boto3==1.40.55
+spacy==3.8.7
 en_core_web_sm @ https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.8.0/en_core_web_sm-3.8.0.tar.gz
-gradio==5.
+gradio==5.49.1
 pyarrow
 openpyxl
 Faker
@@ -14,6 +13,9 @@ presidio_analyzer==2.2.35
 presidio_anonymizer==2.2.35
 scipy
 polars
-transformers==4.
-
-
+transformers==4.57.1
+accelerate==1.11.0
+bertopic==0.17.3
+sentence-transformers==5.1.1
+spaces==0.42.1
+numpy==2.2.6
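requirements_aws.txt now pins the full topic-modelling stack (umap-learn, transformers, accelerate, bertopic, sentence-transformers) alongside gradio and boto3. A minimal sketch of how that stack is usually wired together, assuming a generic sentence-transformers embedding model and the 20 newsgroups demo corpus from the pinned scikit-learn; neither choice comes from this repository:

```python
# Illustrative BERTopic usage with the packages pinned above; the embedding
# model name and the demo corpus are assumptions, not taken from this repo.
from sklearn.datasets import fetch_20newsgroups
from sentence_transformers import SentenceTransformer
from bertopic import BERTopic

docs = fetch_20newsgroups(subset="all", remove=("headers", "footers", "quotes")).data

embedding_model = SentenceTransformer("all-MiniLM-L6-v2")  # assumed model
topic_model = BERTopic(embedding_model=embedding_model)    # default UMAP + HDBSCAN pipeline
topics, probs = topic_model.fit_transform(docs)

print(topic_model.get_topic_info().head())
```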
requirements_gpu.txt CHANGED

@@ -1,15 +1,14 @@
-
-
-
-
-
-
-
-
-
-
-
-spacy==3.8.4
+pandas==2.3.3
+plotly==6.3.1
+scikit-learn==1.7.2
+umap-learn==0.5.9.post2
+gradio==5.49.1
+boto3==1.40.55
+transformers==4.57.1
+accelerate==1.11.0
+torch==2.6.0 --extra-index-url https://download.pytorch.org/whl/cu124
+bertopic==0.17.3
+spacy==3.8.7
 en_core_web_sm @ https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.8.0/en_core_web_sm-3.8.0.tar.gz
 pyarrow
 openpyxl
@@ -19,7 +18,7 @@ presidio_anonymizer==2.2.355
 scipy
 polars
 llama-cpp-python==0.3.4 --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cu121
-sentence-transformers==
-spaces
-numpy==
+sentence-transformers==5.1.1
+spaces==0.42.1
+numpy==2.2.6
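requirements_gpu.txt mixes two CUDA channels: torch 2.6.0 comes from the cu124 extra index, while llama-cpp-python 0.3.4 comes from the cu121 wheel index. A quick, illustrative post-install check (not part of the repository) to confirm the pins resolved and that torch can see a CUDA device:

```python
# Illustrative environment check after installing requirements_gpu.txt.
from importlib.metadata import version
import torch

for pkg in ("torch", "llama-cpp-python", "sentence-transformers", "spaces", "numpy"):
    print(pkg, version(pkg))

print("CUDA available:", torch.cuda.is_available())
print("Built against CUDA:", torch.version.cuda)
```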