# Use the official Python image as the base image
FROM python:3.10

# Set the working directory in the container
WORKDIR /app

# Create a non-root user
RUN useradd -ms /bin/bash myuser

# Give the user ownership of the working directory and home directory
RUN chown -R myuser:myuser /app /home/myuser

# Switch to the non-root user
USER myuser

# Copy the entire contents of the local directory into the container
COPY . .

# Download the model file
RUN wget https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/resolve/main/llama-2-7b-chat.ggmlv3.q8_0.bin

# Install Chainlit into the user's local site-packages
RUN pip install chainlit==0.6.1 --user

# Set the PATH to include user-specific binaries
ENV PATH="/home/myuser/.local/bin:${PATH}"

# Install the required Python packages
RUN pip install --user -r requirements.txt

# Expose port 7860 internally in the container
EXPOSE 7860

# Run the Chainlit app
CMD ["chainlit", "run", "model.py", "-w", "--port", "7860"]
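
The CMD above expects a model.py that defines the Chainlit app; that file is not shown here. Below is a minimal sketch of what it could look like, assuming (this is an assumption, not part of the repo) that requirements.txt pulls in the ctransformers package to load the GGML weights downloaded above, and using the Chainlit 0.6.x callback API, in which the @cl.on_message handler receives the user's message as a plain string.

# model.py - minimal sketch of the app launched by the CMD above (assumed, not the repo's actual file).
# Assumes the GGML weights downloaded in the Dockerfile sit next to this file
# and that ctransformers is listed in requirements.txt.
import chainlit as cl
from ctransformers import AutoModelForCausalLM

# Load the quantized Llama 2 chat model from the file fetched with wget.
llm = AutoModelForCausalLM.from_pretrained(
    "llama-2-7b-chat.ggmlv3.q8_0.bin",
    model_type="llama",
)

@cl.on_message
async def main(message: str):
    # Chainlit 0.6.x passes the incoming user message as a plain string.
    reply = llm(message, max_new_tokens=256)
    await cl.Message(content=reply).send()

Running the same command as the CMD, chainlit run model.py -w --port 7860, serves this sketch on port 7860; the -w flag enables auto-reload while editing.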