import datetime
import os
import pickle

import gradio as gr
from langchain import OpenAI

from chain import get_new_chain1

def get_faiss_store():
    """Load the pre-built FAISS vector store of documentation embeddings from disk."""
    with open("docs.pkl", "rb") as f:
        return pickle.load(f)
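

# Hypothetical sketch (not part of this app): one way the pickled "docs.pkl"
# store loaded above might have been built. The chunked texts, metadata, and
# the choice of OpenAIEmbeddings here are assumptions, not the repo's actual
# ingestion script.
def build_faiss_store(texts, metadatas=None, path="docs.pkl"):
    from langchain.embeddings import OpenAIEmbeddings
    from langchain.vectorstores import FAISS

    # Embed the documentation chunks, index them in FAISS, then pickle the
    # store so get_faiss_store() can load it at startup.
    store = FAISS.from_texts(texts, OpenAIEmbeddings(), metadatas=metadatas)
    with open(path, "wb") as f:
        pickle.dump(store, f)
    return store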


def set_openai_api_key(api_key, agent):
    """Build the Q&A chain once a key is pasted; it becomes the new agent state."""
    if api_key:
        os.environ["OPENAI_API_KEY"] = api_key
        vectorstore = get_faiss_store()

        # One LLM rephrases the follow-up question; the other writes the final answer.
        rephraser_llm = OpenAI(model_name="text-davinci-003", temperature=0)
        final_output_llm = OpenAI(model_name="text-davinci-003", temperature=0, max_tokens=-1)

        qa_chain = get_new_chain1(vectorstore, rephraser_llm, final_output_llm)
        # Clear the key from the environment once the LLMs have been constructed.
        os.environ["OPENAI_API_KEY"] = ""
        return qa_chain


def chat(inp, history, agent):
    """Handle one chat turn: run the chain on the question plus the prior history."""
    history = history or []
    if agent is None:
        history.append((inp, "Please paste your OpenAI key to use"))
        return history, history
    print("\n==== date/time: " + str(datetime.datetime.now()) + " ====")
    print("inp: " + inp)
    output = agent({"question": inp, "chat_history": history})
    answer = output["answer"]
    history.append((inp, answer))
    print(history)
    return history, history


block = gr.Blocks(css=".gradio-container {background-color: lightgray}")

with block:
    with gr.Row():
        gr.Markdown("<h3><center>Hugging Face Doc Search</center></h3><p>Ask questions about the Hugging Face Transformers Library</p>")

        openai_api_key_textbox = gr.Textbox(
            placeholder="Paste your OpenAI API key (sk-...)",
            show_label=False,
            lines=1,
            type="password",
        )

    chatbot = gr.Chatbot()

    with gr.Row():
        message = gr.Textbox(
            label="What's your question?",
            placeholder="What's the answer to life, the universe, and everything?",
            lines=1,
        )
        submit = gr.Button(value="Send", variant="secondary").style(full_width=False)

    gr.Examples(
        examples=[
            "How do I install transformers?",
            "How do I load pretrained instances with an AutoClass?",
            "How do I fine-tune a pretrained model?",
        ],
        inputs=message,
    )

    gr.HTML(
        """
    This simple application uses LangChain, an LLM, and FAISS to do Q&A over the Hugging Face documentation."""
    )

    gr.HTML(
        "<center>Powered by <a href='huggingface.co'>Hugging Face 🤗</a> and <a href='https://github.com/hwchase17/langchain'>LangChain 🦜️🔗</a></center>"
    )

    # state holds the chat history; agent_state holds the Q&A chain built from the API key.
    state = gr.State()
    agent_state = gr.State()

    # Both the Send button and pressing Enter in the textbox route the message through chat().
    submit.click(chat, inputs=[message, state, agent_state], outputs=[chatbot, state])
    message.submit(chat, inputs=[message, state, agent_state], outputs=[chatbot, state])

    # Rebuild the Q&A chain whenever a new API key is pasted.
    openai_api_key_textbox.change(
        set_openai_api_key,
        inputs=[openai_api_key_textbox, agent_state],
        outputs=[agent_state],
    )

block.launch(debug=True)
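
# A guess at how to run this locally (assumptions: this file is saved as app.py,
# chain.py and docs.pkl sit alongside it, faiss is needed to unpickle the store,
# and an older langchain release that still exposes `from langchain import OpenAI`
# is installed):
#   pip install gradio langchain openai faiss-cpu
#   python app.py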