File size: 3,482 Bytes
9ae11a6
 
 
 
 
 
3f96f77
9ae11a6
88d4d6c
9ae11a6
9f575f8
9ae11a6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4cfa28f
9ae11a6
c87a80d
9ae11a6
 
 
 
 
 
3580bc8
9ae11a6
 
bfdfb4c
9ae11a6
aa126db
3580bc8
9ae11a6
 
 
aa126db
bfdfb4c
 
9ae11a6
 
bfdfb4c
 
9ae11a6
 
9f575f8
 
 
9ae11a6
 
 
 
 
 
ea7aacd
 
 
 
 
 
9ae11a6
ea7aacd
 
 
 
 
 
 
bfdfb4c
ea7aacd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
944770b
9ae11a6
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107

import gradio as gr
import os
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS
from langchain.chains.question_answering import load_qa_chain
from langchain.llms import HuggingFaceHub

# Hugging Face Hub API token, read from the 'spi' environment variable.
# NOTE(review): raises KeyError at import time if 'spi' is not set — confirm
# the deployment environment always defines it.
apii=os.environ['spi']
# Legacy counters; declared global in generate_response but never updated here.
COUNT, N = 0, 0
# k=[]
# Running (query, answer) log, appended to by generate_response.
chat_history = []
# Placeholder for the QA chain; rebuilt on every generate_response call.
chain = ''
# Commented-out Gradio textbox states for API-key entry; set_apikey /
# enable_api_box still reference these names (dead code if called).
# enable_box = gr.Textbox.update(value=None,
#                           placeholder='Upload your OpenAI API key', interactive=True)
# disable_box = gr.Textbox.update(value='OpenAI API key is Set', interactive=False)
def database():
    """Build and return a FAISS vector store over the local corpus.

    Reads the whole of 'database.txt', slices it into fixed-size
    400-character chunks, embeds each chunk with the MiniLM sentence
    transformer (CPU), and indexes the embeddings in FAISS.

    Returns:
        FAISS: the populated vector store, ready for similarity search.
    """
    with open('database.txt', 'r', encoding='utf-8') as file:
        document = file.read()

    def split_text_into_batches(text, batch_size):
        # Fixed-width slicing; the final chunk may be shorter than batch_size.
        return [text[start:start + batch_size]
                for start in range(0, len(text), batch_size)]

    documents = split_text_into_batches(str(document), 400)
    embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2", model_kwargs={'device': 'cpu'})
    db = FAISS.from_texts(documents, embeddings)

    return db

def set_apikey(api_key):
    """Export the Hub token to the environment and lock the key textbox.

    Fix: the original wrote ``HUGGINFACEHUB_API_TOKEN`` (missing a 'G'),
    an env var that ``HuggingFaceHub`` never reads, so the token was
    silently ignored; corrected to the canonical spelling.

    Args:
        api_key: value typed by the user — deliberately unused; the
            module-level secret ``apii`` is exported instead.

    Returns:
        The ``disable_box`` textbox update.
        NOTE(review): ``disable_box`` is commented out at module level,
        so calling this raises NameError — confirm this handler is dead
        code or restore the definition.
    """
    os.environ["HUGGINGFACEHUB_API_TOKEN"] = apii
    return disable_box
def enable_api_box():
    """Return the textbox update that re-enables API-key entry.

    NOTE(review): ``enable_box`` is commented out at module level above,
    so calling this raises NameError — likely dead code; confirm before
    wiring it to any event handler.
    """
    return enable_box
def add_text(history, text):
    """Append the user's message to the chat history.

    Args:
        history: list of (user, bot) message pairs.
        text: the user's new message.

    Returns:
        A new history list with ``(text, '')`` appended — the empty
        bot slot is filled in later by generate_response.

    Raises:
        gr.Error: when the input box is empty.
    """
    if not text:
        raise gr.Error('Enter text')
    return history + [(text, '')]
def generate_response(history, query):
    """Answer *query* from the FAISS index and stream the reply.

    Retrieves the best-scoring chunk for the query; if its distance
    exceeds the threshold the question is treated as off-topic,
    otherwise the stuff-chain generates an answer over the retrieved
    document. The answer is yielded one character at a time so Gradio
    renders a typing animation, with '' clearing the input textbox.

    Fix: the success branch previously called ``chain.run(...)`` a
    second time just to print, doubling LLM calls (and the printed
    text could differ from the stored answer); now prints ``result``.

    Args:
        history: chatbot history; the last entry's bot slot is filled in.
        query: the user's question.

    Yields:
        (history, '') after each appended character.
    """
    global COUNT, N, chat_history, chain
    # NOTE(review): rebuilds embeddings + index on every request — consider
    # caching the store if latency matters.
    db = database()
    llm = HuggingFaceHub(repo_id="stabilityai/stable-code-3b", model_kwargs={"temperature": 1, "max_length": 500}, huggingfacehub_api_token=apii)
    chain = load_qa_chain(llm, chain_type="stuff")
    # similarity_search_with_score returns (Document, score) pairs, best first.
    doc = db.similarity_search_with_score(query)
    score = doc[0][-1]
    doc = doc[0][:-1]
    threshold = 0.7

    if score > threshold:
        # FAISS score here is a distance: larger means less similar, so reject.
        result = "Sorry, but I can't answer that at the moment. Kindly recheck, the question may not be related to the Subject."
    else:
        # Relevant context found; run the QA chain over the retrieved document.
        result = chain.run(input_documents=doc, question=query)
    print(result)
    chat_history += [(query, result)]

    # Stream the answer character by character.
    # NOTE(review): assumes Gradio delivers history rows as mutable lists
    # (tuples from add_text are converted by the Chatbot component) — confirm.
    for char in result:
        history[-1][-1] += char
        yield history, ''

# Gradio UI: a chatbot pane above a text input row with a submit button.
with gr.Blocks() as demo:
    # Create a Gradio block

    with gr.Column():


        with gr.Row():
            # Conversation pane; starts empty.
            chatbot = gr.Chatbot(value=[], elem_id='chatbot')
            # chatbot = gr.Chatbot(value=[], elem_id='chatbot').style(height=570)

    with gr.Row():
        with gr.Column(scale=2):
            # User input box (label hidden; placeholder doubles as a greeting).
            txt = gr.Textbox(
                show_label=False,
                placeholder="Welcome to Chatbot for Ramayana."
            )
            # ).style(container=False)

        with gr.Column(scale=1):
            submit_btn = gr.Button('Submit')

    # Event handler for submitting text and generating response
    # Two-stage pipeline: add_text appends the user turn synchronously
    # (queue=False), then generate_response streams the bot reply into
    # the chatbot and clears the textbox.
    submit_btn.click(
        fn=add_text,
        inputs=[chatbot, txt],
        outputs=[chatbot],
        queue=False
    ).success(
        fn=generate_response,
        inputs=[chatbot, txt],
        outputs=[chatbot, txt]
    )

# Entry point: enable the request queue (required for streaming/generator
# handlers) and launch the app with debug logging.
if __name__ == "__main__":    
    demo.queue()
    demo.launch(debug=True)