FROM python:3.10-slim

# Build tools for compiling llama-cpp-python with OpenBLAS, plus curl for the model download
RUN apt-get update && \
    apt-get install -y \
    libopenblas-dev \
    ninja-build \
    build-essential \
    pkg-config \
    curl

# Compile llama-cpp-python against OpenBLAS and include the bundled OpenAI-compatible server
RUN pip install -U pip setuptools wheel && \
    CMAKE_ARGS="-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS" FORCE_CMAKE=1 pip install --verbose "llama-cpp-python[server]"
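
# (Optional check, not part of the original build: importing the package at build
# time surfaces CMake/BLAS compilation problems immediately.)
# RUN python -c "import llama_cpp; print(llama_cpp.__version__)"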

# Download the quantized Zephyr 7B model; use /resolve/ (not /blob/) so curl fetches
# the raw GGUF file instead of the Hugging Face HTML page
RUN mkdir model && \
    curl -L https://huggingface.co/TheBloke/zephyr-7B-alpha-GGUF/resolve/main/zephyr-7b-alpha.Q4_K_M.gguf -o model/gguf-model.gguf
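
# (Optional hardening, not part of the original command: adding -f and --retry,
# e.g. "curl -fL --retry 3 ... -o model/gguf-model.gguf", makes this multi-gigabyte
# download fail the build on HTTP errors instead of saving an error page as the model.)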

# Copy the launch script and application files into the image
COPY ./start_server.sh ./
COPY ./main.py ./
COPY ./index.html ./

# Make the launch script executable
RUN chmod +x ./start_server.sh

# Server bind address and port (7860 is the default application port on Hugging Face Spaces)
ENV HOST=0.0.0.0
ENV PORT=7860

EXPOSE ${PORT}

# Launch the server via the startup script
CMD ["/bin/sh", "./start_server.sh"]
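
# start_server.sh is copied in above but not shown here. A minimal sketch, assuming
# it only launches the OpenAI-compatible server that ships with llama-cpp-python:
#
#   #!/bin/sh
#   exec python -m llama_cpp.server --model model/gguf-model.gguf \
#       --host "$HOST" --port "$PORT"
#
# Local usage (the image tag "zephyr-server" is illustrative):
#   docker build -t zephyr-server .
#   docker run -p 7860:7860 zephyr-server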