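# Base image: NVIDIA CUDA 12.4.1 with cuDNN (devel) on Ubuntu 22.04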
FROM nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04
ENV DEBIAN_FRONTEND=noninteractive
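# System packages: certificates, Git/Git LFS, download tools, NVIDIA driver userspace, and Python 3.10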
RUN apt-get update && \
    apt-get upgrade -y && \
    apt-get install -y --no-install-recommends \
        ca-certificates \
        git \
        git-lfs \
        wget \
        curl \
        nvidia-driver-535 \
        python3.10 \
        python3.10-venv \
        python3-pip \
        python-is-python3
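# Download the standalone Ollama Linux binary and make it executable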
RUN curl -L https://ollama.com/download/ollama-linux-amd64 -o /usr/bin/ollama
RUN chmod +x /usr/bin/ollama
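# Create and switch to a non-root user (uid 1000, as Hugging Face Spaces expects)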
ENV USER='ollamafy'
RUN useradd -m -u 1000 ${USER}
USER ${USER}
ENV HOME=/home/${USER} \
    PATH=${HOME}/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:${PATH} \
    APPDIR=${HOME}/app
WORKDIR ${APPDIR}
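# Copy the Space sources into the app directory, owned by the non-root user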
COPY --chown=1000 . ${APPDIR}
ENV PYTHONPATH=${HOME}/.local/bin:${PYTHONPATH}
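# Python dependencies for the Gradio UI and Hugging Face Hub transfers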
RUN python -m pip install --no-cache-dir -U pip setuptools wheel
RUN python -m pip install "huggingface-hub" "hf-transfer" "gradio[oauth]>=4.28.0" "gradio_huggingfacehub_search==0.0.7" "APScheduler"
RUN git clone https://github.com/ggerganov/llama.cpp
COPY groups_merged.txt llama.cpp/.
WORKDIR ${APPDIR}/llama.cpp
RUN git pull
RUN python -m pip install -r requirements.txt
RUN GGML_CUDA=1 LLAMA_CUDA=1 make -j llama-quantize
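# Runtime environment: unbuffered Python, hf_transfer downloads, Gradio server settings, CUDA libraries, and Ollama model storage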
ENV PYTHONUNBUFFERED=1 \
    HF_HUB_ENABLE_HF_TRANSFER=1 \
    GRADIO_ALLOW_FLAGGING=never \
    GRADIO_NUM_PORTS=1 \
    GRADIO_SERVER_NAME=0.0.0.0 \
    GRADIO_THEME=huggingface \
    TQDM_POSITION=-1 \
    TQDM_MININTERVAL=1 \
    SYSTEM=spaces \
    LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64:/usr/local/cuda/lib64:${LD_LIBRARY_PATH} \
    NVIDIA_DRIVER_CAPABILITIES=compute,utility \
    NVIDIA_VISIBLE_DEVICES=all \
    OLLAMA_MODELS=${APPDIR}/.ollama/models \
    OLLAMA_HOST=127.0.0.1:0
WORKDIR ${APPDIR}
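# Start the Gradio app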
ENTRYPOINT python app.py