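"""Sunny chatbot.

A small Gradio chat app around the Zephyr-7B-beta GGUF model: the model is loaded
locally through LangChain's CTransformers wrapper, combined with a custom prompt in
an LLMChain, and served through a Gradio Blocks interface.
"""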
import torch
from langchain.llms import CTransformers
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
import gradio as gr
import time

custom_prompt_template = """
You are an AI chatbot named Sunny, created by 'Sic Team'. Your task is to chat with users and provide information based on the user's query given below.
Query: {query}
Reply with a helpful message in English and always try to give answers that are relevant to the user's query.
"""


def set_custom_prompt():
    prompt = PromptTemplate(
        template=custom_prompt_template, input_variables=['query'])
    return prompt


def load_model():
    # Check whether a CUDA GPU is available (used to decide whether to offload layers).
    use_gpu = torch.cuda.is_available()
    print(f"Using device: {'cuda' if use_gpu else 'cpu'}")

    # Load the locally downloaded GGUF model. The LangChain CTransformers wrapper takes
    # generation settings through a `config` dict; there is no `device` argument, so GPU
    # use is requested via `gpu_layers` (this assumes a CUDA-enabled ctransformers build,
    # and the layer count below is an arbitrary choice).
    # If the Hub repo holds several GGUF files, a specific `model_file` may also be needed.
    llm = CTransformers(
        model="TheBloke/zephyr-7B-beta-GGUF",
        model_type="mistral",
        config={
            "max_new_tokens": 4384,
            "temperature": 0.2,
            "repetition_penalty": 1.13,
            "gpu_layers": 50 if use_gpu else 0,
        },
    )

    return llm


# Build the LLMChain that combines the custom prompt with the loaded model.
def chain_pipeline():
    llm = load_model()
    main_prompt = set_custom_prompt()
    main_chain = LLMChain(prompt=main_prompt, llm=llm)
    return main_chain


# Create the chain once at startup so the model is loaded before the UI launches.
llmchain = chain_pipeline()


def bot(query):
    llm_response = llmchain.run({"query": query})
    return llm_response

# Optional extra <head> content: a Google AdSense script that gr.Blocks injects into the page's HTML head.
head = """
<script async src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-2769307761697918"
     crossorigin="anonymous"></script>
"""

with gr.Blocks(head=head, title='Sunny', css="footer {visibility: hidden}") as main:
    gr.Markdown("# Sunny Chatbot")
    chatbot = gr.Chatbot([], elem_id="chatbot", height=700)
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot])
    css="footer {visibility: hidden}"

    def respond(message, chat_history):
        # Generate the bot's reply and append the (user, bot) pair to the chat history.
        bot_message = bot(message)
        chat_history.append((message, bot_message))
        # Brief pause before the UI updates.
        time.sleep(2)
        return "", chat_history

    msg.submit(respond, [msg, chatbot], [msg, chatbot])

main.launch(share=False)
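# share=False keeps the app local only; setting share=True would create a temporary public Gradio link.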