Spaces: Running on A100
AurelioAguirre committed · Commit 925480a · Parent(s): 2bd30ac

Fixed cache issue in dockerfile.

Dockerfile: +17 -4
Dockerfile CHANGED
@@ -10,6 +10,14 @@ RUN apt-get update && \
     apt-get clean && \
     rm -rf /var/lib/apt/lists/*
 
+# Create cache directory and set permissions
+RUN mkdir -p /app/.cache/huggingface && \
+    chmod 777 /app/.cache/huggingface
+
+# Set environment variables for cache
+ENV TRANSFORMERS_CACHE=/app/.cache/huggingface/hub
+ENV HF_HOME=/app/.cache/huggingface
+
 # Copy requirements first to leverage Docker cache
 COPY requirements.txt .
 
@@ -19,8 +27,9 @@ RUN pip install --no-cache-dir -r requirements.txt
 # Copy the rest of the application
 COPY . .
 
-# Create checkpoints directory
-RUN mkdir -p /app/checkpoints
+# Create checkpoints directory with proper permissions
+RUN mkdir -p /app/checkpoints && \
+    chmod 777 /app/checkpoints
 
 # The token will be passed during build time
 ARG HF_TOKEN
@@ -37,10 +46,14 @@ RUN if [ -n "$HF_TOKEN" ]; then \
 # Set environment variables
 ENV LLM_ENGINE_HOST=0.0.0.0
 ENV LLM_ENGINE_PORT=8001
+
+# Update MODEL_PATH for the new model
 ENV MODEL_PATH=/app/checkpoints/meta-llama/Llama-2-3b-chat-hf
 
-# Expose
-
+# Expose both ports:
+# 8001 for FastAPI
+# 7860 for Hugging Face Spaces
+EXPOSE 8001 7860
 
 # Command to run the application
 CMD ["python", "main/main.py"]
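For reference, a minimal sketch of how this image might be built and run locally. The image tag llm-engine and the token value are placeholders, not from the repo; only the HF_TOKEN build arg and the 8001/7860 ports come from the Dockerfile above.

    # Pass the Hugging Face token declared with ARG HF_TOKEN (placeholder value)
    docker build --build-arg HF_TOKEN=hf_xxxxxxxx -t llm-engine .

    # Publish the two exposed ports: 8001 for FastAPI, 7860 for Hugging Face Spaces
    docker run --rm -p 8001:8001 -p 7860:7860 llm-engine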