Logeswaransr committed on
Commit 1d28e28 · 1 Parent(s): 1c25d3a

App files added

Files changed (3)
  1. Dockerfile +33 -0
  2. app.py +54 -0
  3. requirements.txt +2 -0
Dockerfile ADDED
@@ -0,0 +1,33 @@
+ # read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
+ # you will also find guides on how best to write your Dockerfile
+
+ FROM python:latest
+
+ WORKDIR /code
+
+ COPY ./requirements.txt /code/requirements.txt
+
+ RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+ # Install Ollama
+ RUN curl -fsSL https://ollama.com/install.sh | sh
+
+ # Note: "ollama serve" is deliberately not run here; a server started at build time would block the build. app.py starts it when the container runs.
+
+ # Set up a new user named "user" with user ID 1000
+ RUN useradd -m -u 1000 user
+
+ # Switch to the "user" user
+ USER user
+
+ # Set home to the user's home directory
+ ENV HOME=/home/user \
+     PATH=/home/user/.local/bin:$PATH
+
+ # Set the working directory to the user's home directory
+ WORKDIR $HOME/app
+
+ # Copy the current directory contents into the container at $HOME/app, setting the owner to the user
+ COPY --chown=user . $HOME/app
+
+ CMD ["python", "app.py"]
app.py ADDED
@@ -0,0 +1,54 @@
+ import subprocess
+ import time
+
+ import gradio as gr
+ from ollama import chat
+
+ def interact(message: str, history: list):
+     # Gradio (type="messages") supplies history as a list of dicts; keep only
+     # the "role" and "content" keys that Ollama expects.
+     chat_history = [{"role": msg["role"], "content": msg["content"]} for msg in history]
+     chat_history.append({"role": "user", "content": message})
+
+     # Stream the reply chunk by chunk.
+     response = chat(
+         model="deepseek-r1:1.5b",
+         messages=chat_history,
+         stream=True
+     )
+
+     text_response = ""
+     thinking_response = gr.ChatMessage(content="", metadata={"title": "Thinking Cloud"})
+     thinking = False
+
+     for chunk in response:
+         bit = chunk["message"]["content"]
+         # DeepSeek-R1 wraps its reasoning in <think>...</think> tags; route that
+         # text into the collapsible "Thinking Cloud" message.
+         if bit == "<think>":
+             thinking = True
+             continue
+         elif bit == "</think>":
+             thinking = False
+             continue
+
+         if thinking:
+             thinking_response.content += bit
+         else:
+             text_response += bit
+         yield [thinking_response, text_response]
+
+ interface = gr.ChatInterface(
+     fn=interact,
+     type="messages",
+     title="Deepseek-R1 Chat Interface"
+ )
+
+ if __name__ == "__main__":
+     # Start the Ollama server in the background (the Dockerfile does not start
+     # it), give it a moment to come up, then pull the model before launching.
+     subprocess.Popen(["ollama", "serve"])
+     time.sleep(5)
+     subprocess.run(["ollama", "pull", "deepseek-r1:1.5b"], check=True)
+     # Bind to 0.0.0.0:7860 so the app is reachable inside the Docker Space.
+     interface.launch(server_name="0.0.0.0", server_port=7860)
requirements.txt ADDED
@@ -0,0 +1,2 @@
+ ollama
+ gradio