pandas==2.3.3
plotly==6.3.1
scikit-learn==1.7.2
umap-learn==0.5.9.post2
gradio==5.49.1
boto3==1.40.55
transformers==4.57.1
accelerate==1.11.0
bertopic==0.17.3
spacy==3.8.7
en_core_web_sm @ https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.8.0/en_core_web_sm-3.8.0.tar.gz
pyarrow==21.0.0
openpyxl==3.1.5
Faker==37.11.0
presidio_analyzer==2.2.360
presidio_anonymizer==2.2.360
scipy==1.15.3
polars==1.34.0
sentence-transformers==5.1.1
torch==2.6.0 --extra-index-url https://download.pytorch.org/whl/cu124
# https://github.com/abetlen/llama-cpp-python/releases/download/v0.3.2/llama_cpp_python-0.3.2-cp311-cp311-win_amd64.whl  # exact wheel pinned for Windows
# llama-cpp-python==0.3.2 --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu
# Exact llama-cpp-python wheel (CUDA 12.1, CPython 3.10, Linux x86_64) pinned for Hugging Face Spaces compatibility
https://github.com/abetlen/llama-cpp-python/releases/download/v0.3.4-cu121/llama_cpp_python-0.3.4-cp310-cp310-linux_x86_64.whl
spaces==0.42.1
numpy==2.2.6