# syntax=docker/dockerfile:1
# NOTE(review): the lines below were a Hugging Face page header accidentally
# captured with the file ("m97j's picture / Initial commit / 5fc69e4 /
# raw / history blame / 2.1 kB") — commented out so the Dockerfile parses.
# ----------- Base image -----------
# Slim variant keeps the runtime layer small; tag pinned to a minor version.
FROM python:3.10-slim

# ----------- Working directory -----------
# Created automatically if missing; all later relative paths resolve here.
WORKDIR /app

# ----------- System packages -----------
# build-essential + cmake are needed to compile native wheels (e.g. sentencepiece).
# Cache cleanup happens in the SAME layer so the lists never bloat the image.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        build-essential \
        cmake \
    && rm -rf /var/lib/apt/lists/*
# ----------- μ˜μ‘΄μ„± μ„€μΉ˜ -----------
# requirements λ¨Όμ € 볡사 β†’ μΊμ‹œ ν™œμš© κ°€λŠ₯
COPY requirements.txt /app/requirements.txt
# PyTorch CPU 버전 μ„€μΉ˜ (GPU ν•„μš” μ—†μŒ)
RUN pip install --no-cache-dir torch==2.0.1+cpu -f https://download.pytorch.org/whl/cpu/torch_stable.html \
&& pip install --no-cache-dir -r /app/requirements.txt
# ----------- Model download -----------
# BUG FIX: the original `python -c "...; for key, ... in models.items(): ..."`
# was a guaranteed SyntaxError — a compound `for` statement cannot follow
# semicolon-joined statements on one logical line. Rewritten as a real script
# via a BuildKit heredoc (Dockerfile syntax >= 1.4, default in modern Docker).
#
# Also moved BEFORE `COPY . /app/` so that editing application code no longer
# invalidates this expensive layer and re-downloads all three models.
#
# NOTE(review): models are stored via `cache_dir=...` (HF cache layout, keyed
# by model name). The runtime must load them the same way, e.g.
# `from_pretrained(<name>, cache_dir=<dir>)` — if the app instead calls
# `from_pretrained(<dir>)` directly, switch this to `save_pretrained(path)`.
RUN python <<'PY'
import os
from transformers import AutoModel, AutoTokenizer

MODELS = {
    "emotion": ("tae898/emoberta-base-ko", "./models/emotion-classification-model"),
    "fallback": ("skt/ko-gpt-trinity-1.2B-v0.5", "./models/fallback-npc-model"),
    "embedder": ("sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2", "./models/sentence-embedder"),
}

for key, (name, path) in MODELS.items():
    os.makedirs(path, exist_ok=True)
    AutoModel.from_pretrained(name, cache_dir=path)
    AutoTokenizer.from_pretrained(name, cache_dir=path)
PY

# ----------- Application code -----------
# Copied last: source edits only invalidate this cheap layer, never the
# dependency or model layers above.
COPY . /app/
# ----------- Runtime environment -----------
# Absolute paths to the model directories populated at build time
# (./models/* under WORKDIR /app). Grouped into one ENV instruction.
ENV EMOTION_MODEL_DIR=/app/models/emotion-classification-model \
    FALLBACK_MODEL_DIR=/app/models/fallback-npc-model \
    EMBEDDER_MODEL_DIR=/app/models/sentence-embedder

# ----------- Port (documentation only; publish with -p at run time) -----------
EXPOSE 8000

# ----------- Launch command -----------
# BUG FIX: exec-form (JSON array) CMD performs NO shell processing, so the
# original passed the literal string "${PORT:-8000}" to uvicorn and the server
# failed to start. A shell wrapper expands ${PORT:-8000}; `exec` replaces the
# shell so uvicorn runs as PID 1 and receives SIGTERM from `docker stop`.
CMD ["sh", "-c", "exec uvicorn app:app --host 0.0.0.0 --port ${PORT:-8000}"]