Still command issues
Dockerfile CHANGED (+4 -1)
@@ -45,9 +45,12 @@ from DotsOCR import modeling_dots_ocr_vllm' $(which vllm)
 EXPOSE 7860
 ENV PORT=7860
 
+# Override the entrypoint to use bash directly
+ENTRYPOINT ["/bin/bash", "-c"]
+
 # Notes:
 # --chat-template-content-format string per dots.ocr README
 # --served-model-name model so clients can use model="model"
 # --trust-remote-code load repo's custom code
 # --host 0.0.0.0 --port $PORT bind to Space port
-CMD
+CMD ["vllm serve ${HF_MODEL_PATH} --host 0.0.0.0 --port ${PORT} --served-model-name model --gpu-memory-utilization 0.95 --chat-template-content-format string --trust-remote-code"]
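With both ENTRYPOINT and CMD in exec (JSON array) form, Docker appends the CMD entries as arguments to the ENTRYPOINT, so the container starts with the invocation sketched below. Because the whole command is one string handed to bash -c, bash expands ${HF_MODEL_PATH} and ${PORT} from the container environment at runtime instead of Docker passing them through literally (PORT=7860 comes from the ENV instruction above; HF_MODEL_PATH is assumed to be set earlier in the Dockerfile or by the Space). A minimal sketch of the equivalent shell invocation, not the literal container output:

  # Effective startup command: the CMD string becomes the single argument after -c,
  # and bash resolves the environment variables when the command runs.
  /bin/bash -c 'vllm serve ${HF_MODEL_PATH} --host 0.0.0.0 --port ${PORT} --served-model-name model --gpu-memory-utilization 0.95 --chat-template-content-format string --trust-remote-code'

Note that bash -c treats only its first argument as the command to run, so the CMD must stay a single string element for all of the flags to be picked up.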