Spaces:
Runtime error
Runtime error
Update Dockerfile
Browse files — Dockerfile: +5 −3
Dockerfile
CHANGED
@@ -3,6 +3,9 @@ FROM nvidia/cuda:${CUDA_IMAGE}
|
|
3 |
|
4 |
# We need to set the host to 0.0.0.0 to allow outside access
|
5 |
ENV HOST 0.0.0.0
|
|
|
|
|
|
|
6 |
|
7 |
RUN apt-get update && apt-get upgrade -y \
|
8 |
&& apt-get install -y git build-essential \
|
@@ -11,8 +14,6 @@ RUN apt-get update && apt-get upgrade -y \
|
|
11 |
libclblast-dev libopenblas-dev \
|
12 |
&& mkdir -p /etc/OpenCL/vendors && echo "libnvidia-opencl.so.1" > /etc/OpenCL/vendors/nvidia.icd
|
13 |
|
14 |
-
COPY . .
|
15 |
-
|
16 |
# setting build related env vars
|
17 |
ENV CUDA_DOCKER_ARCH=all
|
18 |
ENV LLAMA_CUBLAS=1
|
@@ -21,7 +22,8 @@ ENV LLAMA_CUBLAS=1
|
|
21 |
RUN python3 -m pip install --no-cache-dir --upgrade pip pytest cmake scikit-build setuptools fastapi uvicorn sse-starlette pydantic-settings starlette-context huggingface-hub==0.14.1 flask
|
22 |
|
23 |
# Install llama-cpp-python (build with cuda)
|
24 |
-
RUN CMAKE_ARGS="-DLLAMA_CUBLAS=on" pip install llama-cpp-python
|
|
|
25 |
|
26 |
# Run the server
|
27 |
CMD python3 -m app.py
|
|
|
3 |
|
4 |
# We need to set the host to 0.0.0.0 to allow outside access.
# Fix: use the key=value form — the space-separated `ENV HOST 0.0.0.0` syntax
# is legacy and flagged by BuildKit's LegacyKeyValueFormat build check.
ENV HOST=0.0.0.0
|
6 |
# Create a dedicated non-root account (UID 1000, the Hugging Face Spaces convention).
# NOTE(review): no USER directive appears later in this file, so the container
# still runs as root at runtime — consider adding `USER user` after the
# root-requiring RUN steps.
RUN useradd -m -u 1000 user

# Absolute app directory; created automatically if missing.
WORKDIR /home/user/app

# --link keeps this layer independent of earlier layers for better caching;
# --chown avoids a separate (layer-doubling) RUN chown.
COPY --link --chown=1000 ./ /home/user/app
|
9 |
|
10 |
RUN apt-get update && apt-get upgrade -y \
|
11 |
&& apt-get install -y git build-essential \
|
|
|
14 |
libclblast-dev libopenblas-dev \
|
15 |
&& mkdir -p /etc/OpenCL/vendors && echo "libnvidia-opencl.so.1" > /etc/OpenCL/vendors/nvidia.icd
|
16 |
|
|
|
|
|
17 |
# Build-related configuration consumed by the llama-cpp-python CUDA build below;
# grouped into a single ENV instruction (same resulting environment, one layer).
ENV CUDA_DOCKER_ARCH=all \
    LLAMA_CUBLAS=1
|
|
|
22 |
# Python dependencies for the API server.
# NOTE(review): only huggingface-hub is pinned — pin the remaining packages for
# reproducible builds (hadolint DL3013). pytest/cmake/scikit-build are
# build/test-time tools; consider keeping them out of the runtime image.
RUN python3 -m pip install --no-cache-dir --upgrade pip pytest cmake scikit-build setuptools fastapi uvicorn sse-starlette pydantic-settings starlette-context huggingface-hub==0.14.1 flask
|
23 |
|
24 |
# Install llama-cpp-python, compiled from source with cuBLAS so inference can
# use the CUDA GPU.
# Fix (consistency): the sibling install step uses `python3 -m pip`, but this
# one used bare `pip`, which may resolve to a different interpreter's pip —
# invoke pip through the same python3 explicitly.
# NOTE(review): version is unpinned; pin it for reproducible builds.
RUN CMAKE_ARGS="-DLLAMA_CUBLAS=on" python3 -m pip install --no-cache-dir llama-cpp-python
|
26 |
+
|
27 |
|
28 |
# Run the server.
# Fix: `python3 -m app.py` is invalid — `-m` expects an importable module name
# without the `.py` suffix, so the original command fails at startup
# ("No module named app.py"), matching this Space's "Runtime error" status.
# Run the file directly instead. Exec (JSON) form keeps python as PID 1 so it
# receives SIGTERM from `docker stop` (shell form wraps it in `/bin/sh -c`).
# NOTE(review): still runs as root — consider `USER user` before this line.
CMD ["python3", "app.py"]
|