# kwabs22 — Testing Stable LM 2 1.6B Zephyr
# (Hugging Face Space file, rev f5daf13; original viewer chrome: raw | history blame | 1.13 kB)
# Use an official Python runtime as a parent image.
# NOTE(review): Python 3.8 reached end-of-life in Oct 2024 — consider bumping
# to a supported 3.x tag once requirements.txt is verified against it.
FROM python:3.8-slim

# Set the working directory in the container
WORKDIR /usr/src/app

# Install system build/fetch dependencies.
# update + install share one layer (stale-cache bug otherwise, DL3009);
# --no-install-recommends keeps the image lean; the apt lists are removed
# in the same layer so they never persist in the image (DL3015).
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        git \
        wget \
    && rm -rf /var/lib/apt/lists/*
# Clone llama.cpp.
# NOTE(review): the clone is unpinned — builds are not reproducible; consider
# checking out a fixed tag/commit (e.g. `git clone --branch <tag> --depth 1`).
RUN git clone https://github.com/ggerganov/llama.cpp.git

# Download the quantized model into llama.cpp/models.
# The URL is quoted so the `?download=true` query string reaches wget verbatim
# instead of being treated as a shell glob pattern.
RUN wget -O llama.cpp/models/stablelm-2-zephyr-1_6b-Q4_0.gguf \
        "https://huggingface.co/stabilityai/stablelm-2-zephyr-1_6b/resolve/main/stablelm-2-zephyr-1_6b-Q4_0.gguf?download=true"

# Build llama.cpp. `make -C` builds inside the subdirectory without the
# `RUN cd … && …` anti-pattern (hadolint DL3003).
RUN make -C llama.cpp -j
# Point matplotlib's config/cache at a writable directory (avoids warnings
# when the process cannot write to the default ~/.config/matplotlib).
# key=value form — the space-separated ENV syntax is deprecated.
ENV MPLCONFIGDIR=/usr/src/app/flagged

# Make sure the directory exists and is writable.
# NOTE(review): chmod -R 777 is overly broad; once a runtime USER is
# introduced, prefer chown-ing the directory to that user instead.
RUN mkdir -p $MPLCONFIGDIR && chmod -R 777 $MPLCONFIGDIR
# Copy the dependency manifest alone and install first, so the (slow) pip
# layer stays cached unless requirements.txt itself changes.
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

# Copy the application source into /usr/src/app (after deps, so source edits
# don't invalidate the pip layer). Add a .dockerignore to keep .git,
# caches, and local junk out of the build context.
COPY . .

# Document the service port (EXPOSE does not publish it by itself).
EXPOSE 7860

# Run app.py when the container launches (exec form: app is PID 1 and
# receives SIGTERM from `docker stop`).
CMD ["python", "./app.py"]