import subprocess
import time
from typing import Iterator

import gradio as gr
from ollama import ChatResponse, chat

# DeepSeek-R1 distilled to 1.5B parameters; any other Ollama model tag could be swapped in here.
model_id = "deepseek-r1:1.5b"

def interact(message: str, history: list):
    """Stream a reply from the model, routing <think> output into a separate bubble."""
    # Forward the prior turns plus the new user message to Ollama.
    chat_history = list(history)
    chat_history.append({"role": "user", "content": message})

    # stream=True returns an iterator of partial responses rather than a single ChatResponse
    response: Iterator[ChatResponse] = chat(
        model=model_id,
        messages=chat_history,
        stream=True
    )
    text_response = ""
    thinking_response = gr.ChatMessage(content="", metadata={"title": "Thinking Cloud"})
    thinking = False

    for chunk in response:
        bit = chunk.message.content or ""
        # DeepSeek-R1 wraps its chain-of-thought in <think>...</think> tags;
        # use them to switch between the thinking bubble and the visible answer.
        if bit == "<think>":
            thinking = True
            continue
        elif bit == "</think>":
            thinking = False
            continue

        if thinking:
            thinking_response.content += bit
        else:
            text_response += bit
        # Yield both messages so Gradio re-renders the partial output on every chunk.
        yield [thinking_response, text_response]

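# gr.ChatInterface with type="messages" exchanges OpenAI-style role/content
# dicts with interact(), and renders each yielded list as separate chat bubbles.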
interface = gr.ChatInterface(
    fn=interact,
    type="messages",
    title="DeepSeek-R1 Chat Interface",
    description="Model: DeepSeek-R1 (1.5B parameters)"
)

if __name__ == "__main__":
    print("\n\nStarting the Ollama server...\n\n")
    subprocess.Popen(["ollama", "serve"])
    time.sleep(10)  # give the server a moment to come up before pulling the model
    print("\n\nOllama server is up. Pulling the model...\n\n")
    subprocess.run(["ollama", "pull", model_id], check=True)
    time.sleep(5)
    print(f"\n\n{model_id} pulled successfully!\n\n")
    interface.launch(server_name="0.0.0.0", server_port=7860)
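
# A minimal way to run this locally (assuming the Ollama CLI and the
# "gradio" and "ollama" Python packages are installed):
#
#   python app.py          # "app.py" is a placeholder for this file's name
#
# then open http://localhost:7860 in a browser: the script starts
# `ollama serve`, pulls the model, and launches the Gradio UI itself.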