Dockerfile used to deploy the app on Hugging Face Spaces:

```dockerfile
# Use a lightweight Python base image
FROM python:3.10-slim

# Set the working directory
WORKDIR /app

# Install system dependencies (git, needed by some pip installs)
RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/*

# Copy requirements and install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy the application code
COPY . .

# ✅ Hugging Face cache directory (use /tmp, which is writable in Spaces)
ENV HF_HOME=/tmp

# Pre-download the model into /tmp at build time to avoid a cold start
RUN python -c "from transformers import AutoTokenizer, AutoModelForCausalLM; \
    model_id='rabiyulfahim/qa_python_gpt2'; \
    AutoTokenizer.from_pretrained(model_id, cache_dir='/tmp'); \
    AutoModelForCausalLM.from_pretrained(model_id, cache_dir='/tmp')"

# Expose the port Hugging Face Spaces expects (7860 by default)
EXPOSE 7860

# Run the FastAPI app with uvicorn
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
```