Spaces:
Runtime error
Runtime error
Upload folder using huggingface_hub
Browse files
Changed files:
- __pycache__/gradio_app.cpython-311.pyc (+0 -0)
- gradio_app.py (+22 -66)
__pycache__/gradio_app.cpython-311.pyc
CHANGED
Binary files a/__pycache__/gradio_app.cpython-311.pyc and b/__pycache__/gradio_app.cpython-311.pyc differ
|
|
gradio_app.py
CHANGED
@@ -78,82 +78,38 @@ def load_chain():
|
|
78 |
openai_api_key='none',)
|
79 |
|
80 |
conv_agent_executor = create_conversational_retrieval_agent(
|
81 |
-
llm, tools, verbose=
|
82 |
)
|
83 |
return conv_agent_executor
|
84 |
|
85 |
|
86 |
-
class ChatWrapper:
    """Serialize chat requests behind a lock so concurrent Gradio events
    cannot interleave calls into the (eventual) conversation chain."""

    def __init__(self):
        # Guards __call__ so only one chat request runs at a time.
        self.lock = Lock()

    def __call__(
        self,
        inp: str,
        history: "Optional[Tuple[str, str]]",
        chain: "Optional[ConversationChain]",
    ):
        """Execute the chat functionality.

        Args:
            inp: The user's message.
            history: Accumulated (question, answer) pairs, or ``None`` on
                the first call.
            chain: Conversation chain. Currently unused — the real chain
                call is commented out below and a placeholder is returned.

        Returns:
            The updated history twice (Gradio wires the same value into
            two outputs: the chatbot widget and the state).
        """
        # `with` releases the lock even if the body raises, replacing the
        # original acquire/try/except-reraise/finally-release boilerplate.
        with self.lock:
            history = history or []
            # Run chain and append input.
            # output = chain({'input': inp})
            output = 'this is an output'  # placeholder response
            history.append((inp, output))
        return history, history
|
106 |
-
|
107 |
-
|
108 |
-
# Shared chat handler instance used by both the button and the textbox.
chat = ChatWrapper()

# Legacy demo layout: red-background container with a chatbot, an input
# row, canned example prompts, and two state slots.
block = gr.Blocks(css=".gradio-container {background-color: red}")

with block:
    chatbot = gr.Chatbot()

    with gr.Row():
        message = gr.Textbox(
            label="What's your question?",
            placeholder="What's the answer to life, the universe, and everything?",
            lines=1,
        )
        submit = gr.Button(value="Send", variant="secondary").style(
            full_width=False)

    gr.Examples(
        examples=[
            "Hi! How's it going?",
            "What should I do tonight?",
            "Whats 2 + 2?",
        ],
        inputs=message,
    )

    state = gr.State()
    agent_state = gr.State()

    load_chain()

    # Button click and textbox submit share identical wiring.
    chat_io = dict(inputs=[message, state, agent_state],
                   outputs=[chatbot, state])
    submit.click(chat, **chat_io)
    message.submit(chat, **chat_io)
|
142 |
-
|
143 |
with gr.Blocks() as demo:
|
144 |
chatbot = gr.Chatbot()
|
145 |
msg = gr.Textbox()
|
146 |
clear = gr.ClearButton([msg, chatbot])
|
147 |
chain = load_chain()
|
148 |
|
149 |
-
def respond(message, chat_history):
|
150 |
-
|
151 |
-
|
152 |
-
|
153 |
-
|
154 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
155 |
|
156 |
-
msg.submit(
|
|
|
157 |
|
158 |
if __name__ == "__main__":
|
159 |
-
demo.launch()
|
|
|
78 |
openai_api_key='none',)
|
79 |
|
80 |
conv_agent_executor = create_conversational_retrieval_agent(
|
81 |
+
llm, tools, verbose=False,
|
82 |
)
|
83 |
return conv_agent_executor
|
84 |
|
85 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
86 |
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot])
    chain = load_chain()

    def user(user_message, history):
        """Record the user's turn: clear the textbox and append an
        unanswered [message, None] pair to the chat history."""
        return "", history + [[user_message, None]]

    def respond(history):
        """Generator that streams the bot reply for the newest turn,
        growing the last history entry one character per yield."""
        print('message is', history[-1])
        bot_message = chain({'input': history[-1][0]})['output']
        history[-1][1] = ""
        for character in bot_message:
            history[-1][1] += character
            time.sleep(0.0)  # raise this delay for a visible typing effect
            yield history

    # Non-queued submit records the turn instantly; the chained .then()
    # streams the answer into the chatbot.
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False
               ).then(respond, chatbot, chatbot)

if __name__ == "__main__":
    # BUG FIX: `enable_queue` is no longer a valid `launch()` argument on
    # current Gradio (deprecated in 3.x, removed in 4.x) and crashes at
    # startup with a TypeError — the likely cause of the Space's
    # "Runtime error" banner. Queueing is already enabled by `.queue()`.
    demo.queue(max_size=32).launch()
|