bbbipul committed on
Commit 25be116
1 Parent(s): 5b991eb

Update Dockerfile

Files changed (1)
  1. Dockerfile +56 -12
Dockerfile CHANGED
@@ -1,18 +1,62 @@
- FROM ghcr.io/ggerganov/llama.cpp:server
-
- # Install wget
- RUN apt-get update && apt-get install -y wget
-
- RUN mkdir -p /models/7B
-
- # Create a directory to store the downloaded file
- WORKDIR /app
-
- # Download the file
- RUN wget -O /models/7B/ggml-model.gguf "https://huggingface.co/openbmb/MiniCPM-Llama3-V-2_5-gguf/resolve/main/ggml-model-Q3_K.gguf?download=true"
-
- # Expose the port
- EXPOSE 8080
-
- # Command to run your application with the required arguments
- CMD ["-m", "/models/7B/ggml-model.gguf", "-c", "512", "--host", "0.0.0.0", "--port", "8080"]
+ # Builder stage
+ # FROM ubuntu:latest
+
+ # # Update packages and install curl and gnupg
+ # RUN apt-get update && apt-get install -y \
+ # curl \
+ # gnupg
+
+ # # Add NVIDIA package repositories
+ # RUN curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg \
+ # && echo "deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://nvidia.github.io/libnvidia-container/stable/deb/ $(. /etc/os-release; echo $UBUNTU_CODENAME) main" > /etc/apt/sources.list.d/nvidia-container-toolkit.list
+
+ # # Install NVIDIA container toolkit (Check for any updated methods or URLs for Ubuntu jammy)
+ # RUN apt-get update && apt-get install -y nvidia-container-toolkit || true
+
+ # # Install application
+ # RUN curl https://ollama.ai/install.sh | sh
+ # # Below is to fix embedding bug as per
+ # # RUN curl -fsSL https://ollama.com/install.sh | sed 's#https://ollama.com/download#https://github.com/jmorganca/ollama/releases/download/v0.1.29#' | sh
+
+
+ # # Create the directory and give appropriate permissions
+ # RUN mkdir -p /.ollama && chmod 777 /.ollama
+
+ # WORKDIR /.ollama
+
+ # # Copy the entry point script
+ # COPY entrypoint.sh /entrypoint.sh
+ # RUN chmod +x /entrypoint.sh
+
+ # # Set the entry point script as the default command
+ # ENTRYPOINT ["/entrypoint.sh"]
+ # CMD ["ollama", "serve"]
+
+ # # Set the model as an environment variable (this can be overridden)
+ # ENV model=${model}
+
+ # Expose the server port
+ # Use the official Ollama Docker image as the base image
+ FROM ollama/ollama:latest
+
+ # Create a directory for Ollama data
+ RUN mkdir -p /.ollama
+ RUN chmod -R 777 /.ollama
+
+ WORKDIR /.ollama
+
+ # Copy the entry point script
+ COPY entrypoint.sh /entrypoint.sh
+ RUN chmod +x /entrypoint.sh
+
+ # Set the entry point script as the default command
+ ENTRYPOINT ["/entrypoint.sh"]
+
+ # Set the model as an environment variable (this can be overridden)
+ ENV model=${model}
+
+ # Expose the port that Ollama runs on
+ EXPOSE 7860
+
+ # Command to start the Ollama server
+ CMD ["serve"]
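Note: the new image copies an entrypoint.sh that is not part of this commit. A minimal sketch of what such a script could look like, assuming it binds the Ollama server to the exposed port 7860 and pre-pulls the model named in the optional model environment variable (both assumptions, not shown in this commit):

#!/bin/sh
# Hypothetical entrypoint.sh sketch -- not included in this commit.
set -e

# Assumption: bind the server to the exposed port (Ollama defaults to 11434).
export OLLAMA_HOST="${OLLAMA_HOST:-0.0.0.0:7860}"

# CMD ["serve"] passes "serve" as $1, so this starts the server in the background.
ollama "$@" &
pid=$!

# Assumption: pre-pull the model named in $model once the server has had time to start.
if [ -n "$model" ]; then
  sleep 5
  ollama pull "$model"
fi

# Keep the server process in the foreground so the container stays alive.
wait "$pid"

With such a script in place, the image could be built and run locally along these lines (image tag and model name are placeholders):

docker build -t ollama-space .
docker run -p 7860:7860 -e model=llama3 ollama-space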