yizhangliu committed · Commit 66ffbd1
Parent(s): 85deed6
Update app.py

app.py CHANGED
@@ -12,99 +12,22 @@ openai.api_key = os.getenv("OPENAI_API_KEY")
from utils import get_tmt_client, getTextTrans_tmt
tmt_client = get_tmt_client()

-# language_translation_model = hub.Module(directory=f'./baidu_translate')
def getTextTrans(text, source='zh', target='en'):
-
-
-
-
-
-    # return False

-
-
-
-    # try:
-    #     text_translation = language_translation_model.translate(text, source, target)
-    #     return text_translation
-    # except Exception as e:
-    #     return text
-
-session_token = os.environ.get('SessionToken')
-# logger.info(f"session_token_: {session_token}")
-
-def get_api():
-    api = None
-    try:
-
-
-    except Exception as e:
-
-        api = None
-    return api
-
-def get_response_from_chatgpt(api, text):
-    if api is None:
-        # return "Sorry, I'm busy. Try again later.(1)"
-        return "Openai said: I'm too tired. Let me lie down for a few days. If you like, you can visit my home(1)."
-    try:
-        resp = api.send_message(text)
-        # api.refresh_auth()
-        # api.reset_conversation()
-        response = resp['message']
-        conversation_id = resp['conversation_id']
-        parent_id = resp['parent_id']
-        # logger.info(f"response_: {response}")
-        logger.info(f"conversation_id_: [{conversation_id}] / parent_id: [{parent_id}]")
-    except:
-        # response = "Sorry, I'm busy. Try again later.(2)"
-        response = "Openai said: I'm so tired. Let me lie down for a few days. If you like, you can visit my home(2)."
-    return response

-token_encoder = get_encoder()
-total_tokens = 4096
-max_output_tokens = 1024
-max_input_tokens = total_tokens - max_output_tokens
-
-def get_response_from_openai(input, history):
-    def openai_create(prompt):
-        # no chatgpt, and from gpt-3
-        try:
-            response = openai.Completion.create(
-                model="text-davinci-003",
-                prompt=prompt,
-                temperature=0.9,
-                max_tokens=max_output_tokens,
-                top_p=1,
-                frequency_penalty=0,
-                presence_penalty=0.6,
-                stop=[" Human:", " AI:"]
-            )
-            ret = response.choices[0].text
-            if ret == '':
-                ret = "Openai said: I'm too tired. Let me lie down for a few days. If you like, you can visit my home(3)."
-        except Exception as e:
-            ret = "Openai said: I'm too tired. Let me lie down for a few days. If you like, you can visit my home(4)."
-
-        return ret
-
-    history = history or []
-    his= [tuple(item) for item in history]
-    s = list(sum(his, ()))
-    s.append(input)
-    inp = ' '.join(s)
-    tokens = token_encoder.encode(inp)
-    if len(tokens) > max_input_tokens:
-        new_tokens = tokens[-max_input_tokens:]
-        inp = token_encoder.decode(new_tokens)
-        # tokens_1 = token_encoder.encode(inp)
-        # logger.info(f"tokens_len[1]__{len(tokens)}__{len(new_tokens)}__{len(tokens_1)}")
-    # else:
-    #     logger.info(f"tokens_len[0]__{len(tokens)}")
-
-    output = openai_create(inp)
-    return output
-
start_work = """async() => {
    function isMobile() {
        try {
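Review note: the removed get_response_from_openai kept each request inside the 4096-token window by encoding the concatenated history and keeping only the last max_input_tokens = 3072 tokens. A minimal sketch of that truncation step, assuming tiktoken's GPT-2 encoding as a stand-in for the repo's bundled get_encoder():

# Sketch of the token-window truncation used in the removed code above.
# Assumption: tiktoken substitutes for the space's local get_encoder().
import tiktoken

total_tokens = 4096
max_output_tokens = 1024
max_input_tokens = total_tokens - max_output_tokens

def truncate_to_window(text: str, enc=tiktoken.get_encoding("gpt2")) -> str:
    """Keep only the most recent max_input_tokens tokens of `text`."""
    tokens = enc.encode(text)
    if len(tokens) > max_input_tokens:
        tokens = tokens[-max_input_tokens:]   # drop the oldest tokens first
        text = enc.decode(tokens)
    return text

# Usage: a long chat history is clipped from the left before being sent to the model.
history = " ".join(["Human: hello", "AI: hi there"] * 2000)
clipped = truncate_to_window(history)          # oldest turns fall off the front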
@@ -123,13 +46,7 @@ start_work = """async() => {
        }
        return clientHeight;
    }
-
-        this_width = parseInt(img.style.width) + 20;
-        if (this_width > 100) {
-            this_width = 20;
-        }
-        img.style.width = this_width + "%";
-    }
    function setNativeValue(element, value) {
        const valueSetter = Object.getOwnPropertyDescriptor(element.__proto__, 'value').set;
        const prototype = Object.getPrototypeOf(element);
@@ -142,50 +59,90 @@ start_work = """async() => {
        }
        element.dispatchEvent(new Event('input', { bubbles: true }));
    }
    function save_conversation(chatbot) {
        var conversations = new Array();
-        var
        for (var i = 0; i < chatbot.children.length; i++) {
            innerHTML = chatbot.children[i].innerHTML;
-            conversations.push(innerHTML);
-            if (innerHTML.indexOf("<img ") == -1) {
-
            }
        }
        var json_str = JSON.stringify(conversations);
-        setNativeValue(window['chat_his'], JSON.stringify(
        localStorage.setItem('chatgpt_conversations', json_str);
    }
    function load_conversation(chatbot) {
        var json_str = localStorage.getItem('chatgpt_conversations');
        if (json_str) {
-            var
            conversations = JSON.parse(json_str);
            for (var i = 0; i < conversations.length; i++) {
-
-                if((
-
-
                } else {
-
-
-
-
-                    new_div.onclick = function(e){
-                        img_click(this);
-                    }
-                    new_div.style.padding = "0.2rem";
-                }
                }
-
                new_div.innerHTML = innerHTML;
                chatbot.appendChild(new_div);

-                if (innerHTML.indexOf("<img ") == -1) {
-
                }
            }
-            setNativeValue(window['chat_his'], JSON.stringify(
        }
    }
    var gradioEl = document.querySelector('body > gradio-app').shadowRoot;
@@ -202,12 +159,16 @@ start_work = """async() => {
            page1.style.display = "none";
            page2.style.display = "block";
            window['div_count'] = 0;
            window['chat_bot'] = window['gradioEl'].querySelectorAll('#chat_bot')[0];
            window['chat_bot1'] = window['gradioEl'].querySelectorAll('#chat_bot1')[0];
            window['chat_his'] = window['gradioEl'].querySelectorAll('#chat_history')[0].querySelectorAll('textarea')[0];
            chat_row = window['gradioEl'].querySelectorAll('#chat_row')[0];
            prompt_row = window['gradioEl'].querySelectorAll('#prompt_row')[0];
-            window['chat_bot1'].children[1].textContent = '';

            clientHeight = getClientHeight();
            if (isMobile()) {
@@ -221,9 +182,11 @@ start_work = """async() => {
            }
            chat_row.style.height = new_height;
            window['chat_bot'].style.height = new_height;
-            window['chat_bot'].children[
            window['chat_bot1'].style.height = new_height;
-            window['chat_bot1'].children[
            prompt_row.children[0].style.flex = 'auto';
            prompt_row.children[0].style.width = '100%';
            window['gradioEl'].querySelectorAll('#chat_radio')[0].style.flex = 'auto';
@@ -233,54 +196,77 @@ start_work = """async() => {
            window['gradioEl'].querySelectorAll('#btns_row')[0].children[0].setAttribute('style', 'min-width: min(10px, 100%); flex-grow: 1');
            window['gradioEl'].querySelectorAll('#btns_row')[0].children[1].setAttribute('style', 'min-width: min(10px, 100%); flex-grow: 1');

-            load_conversation(window['chat_bot1'].children[
-            window['chat_bot1'].children[

            window['gradioEl'].querySelectorAll('#clear-btn')[0].onclick = function(e){
                if (confirm('Clear all outputs?')==true) {
-
-
                }
            }

            window['prevPrompt'] = '';
            window['doCheckPrompt'] = 0;
            window['prevImgSrc'] = '';
            window['checkChange'] = function checkChange() {
                try {
-                    if (window['
-
-
                        for (var i = 0; i < new_len; i++) {
-                            new_div = window['chat_bot'].children[
-                            window['chat_bot1'].children[
                        }
-                        window['div_count'] = chat_bot.children[
-                        window['chat_bot1'].children[
-                        save_conversation(window['chat_bot1'].children[
                    }
                    if (window['chat_bot'].children[0].children.length > 1) {
-
                    } else {
-
                    }
                } else {
-                    texts = window['gradioEl'].querySelectorAll('textarea');
-                    text0 = texts[0];
-                    text1 = texts[1];
                    img_index = 0;
-
-                    if (window['doCheckPrompt']
-
                        window['doCheckPrompt'] = 1;
-                        window['prevPrompt'] =

                        tabitems = window['gradioEl'].querySelectorAll('.tabitem');
                        for (var i = 0; i < tabitems.length; i++) {
-                            inputText = tabitems[i].children[0].children[1].children[0].querySelectorAll('
-                            setNativeValue(inputText,
                        }
                        setTimeout(function() {
                            btns = window['gradioEl'].querySelectorAll('button');
                            for (var i = 0; i < btns.length; i++) {
                                if (['Generate image','Run'].includes(btns[i].innerText)) {
@@ -295,31 +281,45 @@ start_work = """async() => {
                        if (imgs.length > 0) {
                            if (window['prevImgSrc'] !== imgs[0].src) {
                                var user_div = document.createElement("div");
-                                user_div.className = "
                                user_div.style.backgroundColor = "#16a34a";
-                                user_div.
-                                window['
                                var bot_div = document.createElement("div");
-                                bot_div.className = "
                                bot_div.style.backgroundColor = "#2563eb";
-                                bot_div.style.width = "
                                bot_div.onclick = function(e){
                                    img_click(this);
-                                }
                                bot_div.style.padding = "0.2rem";
                                bot_div.appendChild(imgs[0].cloneNode(true));
-                                window['chat_bot1'].children[

-                                window['chat_bot1'].children[
                                window['prevImgSrc'] = imgs[0].src;
-                                save_conversation(window['chat_bot1'].children[
                            }
                        }
                        if (tabitems[img_index].children[0].children[1].children[1].children[0].children.length > 1) {
-
                        } else {
-
-
                        }

                    } catch(e) {
@@ -349,44 +349,140 @@ for space_id in space_ids.keys():
    except Exception as e:
        logger.info(f"load_fail__{space_id}_{e}")

-
-
-
    if chat_history != '':
-
-
-

-
    if chat_radio == "Talk to chatGPT":
-
-
-
-        # logger.info(f'liuyz_5___{out_chat}__')
-        return api, out_chat, input1
    else:
        prompt_en = getTextTrans(input0, source='zh', target='en') + f',{random.randint(0,sys.maxsize)}'
-        return

with gr.Blocks(title='Talk to chatGPT') as demo:
-    with gr.
        gr.HTML("<p>You can duplicating this space and use your own session token: <a style='display:inline-block' href='https://huggingface.co/spaces/yizhangliu/chatGPT?duplicate=true'><img src='https://img.shields.io/badge/-Duplicate%20Space-blue?labelColor=white&style=flat&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAP5JREFUOE+lk7FqAkEURY+ltunEgFXS2sZGIbXfEPdLlnxJyDdYB62sbbUKpLbVNhyYFzbrrA74YJlh9r079973psed0cvUD4A+4HoCjsA85X0Dfn/RBLBgBDxnQPfAEJgBY+A9gALA4tcbamSzS4xq4FOQAJgCDwV2CPKV8tZAJcAjMMkUe1vX+U+SMhfAJEHasQIWmXNN3abzDwHUrgcRGmYcgKe0bxrblHEB4E/pndMazNpSZGcsZdBlYJcEL9Afo75molJyM2FxmPgmgPqlWNLGfwZGG6UiyEvLzHYDmoPkDDiNm9JR9uboiONcBXrpY1qmgs21x1QwyZcpvxt9NS09PlsPAAAAAElFTkSuQmCC&logoWidth=14' alt='Duplicate Space'></a></p>")
-    gr.HTML("<p> Instruction on how to get session token can be seen in video <a style='display:inline-block' href='https://www.youtube.com/watch?v=TdNSj_qgdFk'><font style='color:blue;weight:bold;'>here</font></a>. Add your session token by going to settings and add under secrets. </p>")
    with gr.Group(elem_id="page_1", visible=True) as page_1:
        with gr.Box():
            with gr.Row():
                start_button = gr.Button("Let's talk to chatGPT!", elem_id="start-btn", visible=True)
                start_button.click(fn=None, inputs=[], outputs=[], _js=start_work)

-    with gr.
        with gr.Row(elem_id="chat_row"):
            chatbot = gr.Chatbot(elem_id="chat_bot", visible=False).style(color_map=("green", "blue"))
            chatbot1 = gr.Chatbot(elem_id="chat_bot1").style(color_map=("green", "blue"))
        with gr.Row(elem_id="prompt_row"):
-            prompt_input0 = gr.Textbox(lines=2, label="
-            prompt_input1 = gr.Textbox(lines=4, label="prompt", visible=False)
-            chat_history = gr.Textbox(lines=4, label="
            chat_radio = gr.Radio(["Talk to chatGPT", "Text to Image"], elem_id="chat_radio",value="Talk to chatGPT", show_label=False, visible=True)
        with gr.Row(elem_id="btns_row"):
            with gr.Column(id="submit_col"):
                submit_btn = gr.Button(value = "submit",elem_id="submit-btn").style(
@@ -400,12 +496,22 @@ with gr.Blocks(title='Talk to chatGPT') as demo:
                    rounded=(True, True, True, True),
                    width=100
                )
-    api = gr.State(value=get_api())
        submit_btn.click(fn=chat,
-                     inputs=[
-                     outputs=[
                     )
    with gr.Row(elem_id='tab_img', visible=False).style(height=5):
-        tab_img = gr.TabbedInterface(tab_actions, tab_titles)

-demo.launch(debug = True)
from utils import get_tmt_client, getTextTrans_tmt
tmt_client = get_tmt_client()

def getTextTrans(text, source='zh', target='en'):
+    def is_chinese(string):
+        for ch in string:
+            if u'\u4e00' <= ch <= u'\u9fff':
+                return True
+        return False

+    if not is_chinese(text) and target == 'en':
+        return text
+
    try:
+        text_translation = getTextTrans_tmt(tmt_client, text, source, target)
+        return text_translation
    except Exception as e:
+        return text
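Review note: the new getTextTrans now skips the Tencent TMT call entirely when the input contains no CJK characters and the target is English, and falls back to the raw text if translation fails. A minimal sketch of that control flow, with the TMT client replaced by a hypothetical translate callable (the app itself uses getTextTrans_tmt):

# Sketch only; `translate` is a stand-in for the app's getTextTrans_tmt(tmt_client, ...).
def is_chinese(string):
    return any(u'\u4e00' <= ch <= u'\u9fff' for ch in string)

def get_text_trans(text, source='zh', target='en',
                   translate=lambda t, s, d: f"[{d}] {t}"):   # stand-in translator
    if not is_chinese(text) and target == 'en':
        return text                      # already non-Chinese text: skip the API call
    try:
        return translate(text, source, target)
    except Exception:
        return text                      # on any failure, fall back to the original text

print(get_text_trans("hello world"))   # 'hello world'  (short-circuit, no translation)
print(get_text_trans("你好"))           # '[en] 你好'     (goes through the translator)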
start_work = """async() => {
    function isMobile() {
        try {

        }
        return clientHeight;
    }
+
    function setNativeValue(element, value) {
        const valueSetter = Object.getOwnPropertyDescriptor(element.__proto__, 'value').set;
        const prototype = Object.getPrototypeOf(element);

        }
        element.dispatchEvent(new Event('input', { bubbles: true }));
    }
+    function get_clear_innerHTML(innerHTML) {
+        innerHTML = innerHTML.replace(/<p>|<\\/p>|\\n/g, '');
+        regexp = /\\ββ(.*?)\\ββ/;
+        match = innerHTML.match(regexp);
+        if (match) {
+            innerHTML = match[1];
+        }
+        return innerHTML;
+    }
    function save_conversation(chatbot) {
        var conversations = new Array();
+        var conversations_clear = new Array();
        for (var i = 0; i < chatbot.children.length; i++) {
+            testid_icon = 'β:'; //'user'
+            if (chatbot.children[i].dataset['testid'] == 'bot') {
+                testid_icon = 'β:'; //'bot'
+            }
            innerHTML = chatbot.children[i].innerHTML;
+            conversations.push(testid_icon + innerHTML);
+            if (innerHTML.indexOf("<img") == -1 && innerHTML.indexOf("null_") == -1) {
+                conversations_clear.push(testid_icon + get_clear_innerHTML(innerHTML));
            }
        }
        var json_str = JSON.stringify(conversations);
+        setNativeValue(window['chat_his'], JSON.stringify(conversations_clear));
        localStorage.setItem('chatgpt_conversations', json_str);
    }
+    function img_click(img) {
+        this_width = parseInt(img.style.width) + 20;
+        if (this_width > 100) {
+            this_width = 20;
+        }
+        img.style.width = this_width + "%";
+        img.style.height = img.offsetWidth + 'px';
+    }
    function load_conversation(chatbot) {
        var json_str = localStorage.getItem('chatgpt_conversations');
        if (json_str) {
+            var conversations_clear = new Array();
            conversations = JSON.parse(json_str);
            for (var i = 0; i < conversations.length; i++) {
+                innerHTML = conversations[i];
+                if (innerHTML.indexOf("β:") == -1) {
+                    className = "message user svelte-134zwfa";
+                    bgcolor = "#16a34a";
+                    testid = "user";
+                    testid_icon = 'β:'; //'user'
                } else {
+                    className = "message bot svelte-134zwfa";
+                    bgcolor = "#2563eb";
+                    testid = "bot";
+                    testid_icon = 'β:'; //'bot'
                }
+                var new_div = document.createElement("div");
+                new_div.className = className;
+                new_div.style.backgroundColor = bgcolor;
+                new_div.dataset.testid = testid;
+                if (innerHTML.indexOf("data:image/jpeg") >= 0) {
+                    new_div.style.width = "20%";
+                    new_div.style.padding = "0.2rem";
+                    new_div.onclick = function(e) {
+                        img_click(this);
+                    }
+                    setTimeout(function(){
+                        new_div.style.height = new_div.offsetWidth + 'px';
+                        new_div.children[0].setAttribute('style', 'max-width: none; width:100%');
+                    }, 10);
+                }
+                innerHTML = innerHTML.replace("β:", "");
+                innerHTML = innerHTML.replace("β:", "");
                new_div.innerHTML = innerHTML;
+                if (innerHTML.indexOf("null_") != -1) {
+                    new_div.style.display = 'none';
+                }
                chatbot.appendChild(new_div);

+                if (innerHTML.indexOf("<img") == -1 && innerHTML.indexOf("null_") == -1) {
+                    conversations_clear.push(testid_icon + get_clear_innerHTML(innerHTML));
                }
            }
+            setNativeValue(window['chat_his'], JSON.stringify(conversations_clear));
+            setTimeout(function(){
+                window['chat_bot1'].children[1].scrollTop = window['chat_bot1'].children[1].scrollHeight;
+            }, 500);
        }
    }
    var gradioEl = document.querySelector('body > gradio-app').shadowRoot;
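Review note: get_clear_innerHTML strips the <p>…</p> wrapper and escaped newlines that Gradio adds around each message before the history is mirrored into the hidden chat_history textbox (the marker-extraction regexp is kept as-is above because its marker characters did not survive this page's encoding). An illustrative Python equivalent of the stripping step, not part of app.py:

# Illustration only: mirrors the <p>/</p>/newline cleanup done in get_clear_innerHTML.
import re

def clear_inner_html(inner_html: str) -> str:
    return re.sub(r"</?p>|\n", "", inner_html)

print(clear_inner_html("<p>hello\nworld</p>"))  # -> 'helloworld'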
            page1.style.display = "none";
            page2.style.display = "block";
            window['div_count'] = 0;
+            window['chat_radio_0'] = window['gradioEl'].querySelectorAll('#chat_radio')[0].querySelectorAll('input[name=radio-chat_radio]')[0];
+            window['chat_radio_1'] = window['gradioEl'].querySelectorAll('#chat_radio')[0].querySelectorAll('input[name=radio-chat_radio]')[1];
            window['chat_bot'] = window['gradioEl'].querySelectorAll('#chat_bot')[0];
            window['chat_bot1'] = window['gradioEl'].querySelectorAll('#chat_bot1')[0];
+            window['my_prompt'] = window['gradioEl'].querySelectorAll('#my_prompt')[0].querySelectorAll('textarea')[0];
+            window['my_prompt_en'] = window['gradioEl'].querySelectorAll('#my_prompt_en')[0].querySelectorAll('textarea')[0];
            window['chat_his'] = window['gradioEl'].querySelectorAll('#chat_history')[0].querySelectorAll('textarea')[0];
            chat_row = window['gradioEl'].querySelectorAll('#chat_row')[0];
            prompt_row = window['gradioEl'].querySelectorAll('#prompt_row')[0];
+            window['chat_bot1'].children[1].children[0].textContent = '';

            clientHeight = getClientHeight();
            if (isMobile()) {

            }
            chat_row.style.height = new_height;
            window['chat_bot'].style.height = new_height;
+            window['chat_bot'].children[1].style.height = new_height;
            window['chat_bot1'].style.height = new_height;
+            window['chat_bot1'].children[1].style.height = new_height;
+            window['chat_bot1'].children[0].style.top = (parseInt(window['chat_bot1'].style.height)-window['chat_bot1'].children[0].offsetHeight-2) + 'px';
+
            prompt_row.children[0].style.flex = 'auto';
            prompt_row.children[0].style.width = '100%';
            window['gradioEl'].querySelectorAll('#chat_radio')[0].style.flex = 'auto';

            window['gradioEl'].querySelectorAll('#btns_row')[0].children[0].setAttribute('style', 'min-width: min(10px, 100%); flex-grow: 1');
            window['gradioEl'].querySelectorAll('#btns_row')[0].children[1].setAttribute('style', 'min-width: min(10px, 100%); flex-grow: 1');

+            load_conversation(window['chat_bot1'].children[1].children[0]);
+            window['chat_bot1'].children[1].scrollTop = window['chat_bot1'].children[1].scrollHeight;

            window['gradioEl'].querySelectorAll('#clear-btn')[0].onclick = function(e){
                if (confirm('Clear all outputs?')==true) {
+                    for (var i = window['chat_bot'].children[1].children[0].children.length-1; i >= 0; i--) {
+                        window['chat_bot'].children[1].children[0].removeChild(window['chat_bot'].children[1].children[0].children[i]);
+                    }
+                    for (var i = window['chat_bot1'].children[1].children[0].children.length-1; i >= 0; i--) {
+                        window['chat_bot1'].children[1].children[0].removeChild(window['chat_bot1'].children[1].children[0].children[i]);
+                    }
+                    window['div_count'] = 0;
+                    save_conversation(window['chat_bot1'].children[1].children[0]);
                }
            }

+            function set_buttons(action) {
+                window['submit-btn'].disabled = action;
+                window['clear-btn'].disabled = action;
+                window['chat_radio_0'].disabled = action;
+                window['chat_radio_1'].disabled = action;
+                btn_color = 'color:#000';
+                if (action) {
+                    btn_color = 'color:#ccc';
+                }
+                window['submit-btn'].setAttribute('style', btn_color);
+                window['clear-btn'].setAttribute('style', btn_color);
+                window['chat_radio_0'].setAttribute('style', btn_color);
+                window['chat_radio_1'].setAttribute('style', btn_color);
+            }
            window['prevPrompt'] = '';
            window['doCheckPrompt'] = 0;
            window['prevImgSrc'] = '';
            window['checkChange'] = function checkChange() {
                try {
+                    if (window['chat_radio_0'].checked) {
+                        dot_flashing = window['chat_bot'].children[1].children[0].querySelectorAll('.dot-flashing');
+
+                        if (window['chat_bot'].children[1].children[0].children.length > window['div_count'] && dot_flashing.length == 0) {
+                            new_len = window['chat_bot'].children[1].children[0].children.length - window['div_count'];
                            for (var i = 0; i < new_len; i++) {
+                                new_div = window['chat_bot'].children[1].children[0].children[window['div_count'] + i].cloneNode(true);
+                                window['chat_bot1'].children[1].children[0].appendChild(new_div);
                            }
+                            window['div_count'] = window['chat_bot'].children[1].children[0].children.length;
+                            window['chat_bot1'].children[1].scrollTop = window['chat_bot1'].children[1].scrollHeight;
+                            save_conversation(window['chat_bot1'].children[1].children[0]);
                        }
                        if (window['chat_bot'].children[0].children.length > 1) {
+                            set_buttons(true);
+                            window['chat_bot1'].children[0].textContent = window['chat_bot'].children[0].children[1].textContent;
                        } else {
+                            set_buttons(false);
+                            window['chat_bot1'].children[0].textContent = '';
                        }
                    } else {
                        img_index = 0;
+                        draw_prompt_en = window['my_prompt_en'].value;
+                        if (window['doCheckPrompt'] == 0 && window['prevPrompt'] != draw_prompt_en) {
+
+                            console.log('_____draw_prompt_en___[' + draw_prompt_en + ']_');
                            window['doCheckPrompt'] = 1;
+                            window['prevPrompt'] = draw_prompt_en;

                            tabitems = window['gradioEl'].querySelectorAll('.tabitem');
                            for (var i = 0; i < tabitems.length; i++) {
+                                inputText = tabitems[i].children[0].children[1].children[0].querySelectorAll('input')[0];
+                                setNativeValue(inputText, draw_prompt_en);
                            }
                            setTimeout(function() {
+                                window['draw_prompt'] = window['my_prompt'].value;
                                btns = window['gradioEl'].querySelectorAll('button');
                                for (var i = 0; i < btns.length; i++) {
                                    if (['Generate image','Run'].includes(btns[i].innerText)) {

                        if (imgs.length > 0) {
                            if (window['prevImgSrc'] !== imgs[0].src) {
                                var user_div = document.createElement("div");
+                                user_div.className = "message user svelte-134zwfa";
                                user_div.style.backgroundColor = "#16a34a";
+                                user_div.dataset.testid = 'user';
+                                user_div.innerHTML = "<p>作画: " + window['draw_prompt'] + "</p><img></img>";
+                                window['chat_bot1'].children[1].children[0].appendChild(user_div);
+
                                var bot_div = document.createElement("div");
+                                bot_div.className = "message bot svelte-134zwfa";
                                bot_div.style.backgroundColor = "#2563eb";
+                                bot_div.style.width = "20%";
+                                bot_div.dataset.testid = 'bot';
                                bot_div.onclick = function(e){
                                    img_click(this);
+                                }
+                                setTimeout(function(){
+                                    bot_div.style.height = bot_div.offsetWidth + 'px';
+                                    bot_div.children[0].setAttribute('style', 'max-width:none; width:100%');
+                                }, 10);
                                bot_div.style.padding = "0.2rem";
                                bot_div.appendChild(imgs[0].cloneNode(true));
+                                window['chat_bot1'].children[1].children[0].appendChild(bot_div);

+                                window['chat_bot1'].children[1].scrollTop = window['chat_bot1'].children[1].scrollHeight;
                                window['prevImgSrc'] = imgs[0].src;
+                                save_conversation(window['chat_bot1'].children[1].children[0]);
                            }
                        }
                        if (tabitems[img_index].children[0].children[1].children[1].children[0].children.length > 1) {
+                            tips = tabitems[img_index].children[0].children[1].children[1].children[0].textContent;
+                            if (tips.indexOf("Error") == -1) {
+                                set_buttons(true);
+                            } else {
+                                set_buttons(false);
+                            }
+                            window['chat_bot1'].children[0].textContent = '作画中 ' + tips;
                        } else {
+                            set_buttons(false);
+                            window['chat_bot1'].children[0].textContent = '';
+                        }
                        }

                    } catch(e) {
    except Exception as e:
        logger.info(f"load_fail__{space_id}_{e}")

+token_encoder = get_encoder()
+total_tokens = 4096
+max_output_tokens = 1024
+max_input_tokens = total_tokens - max_output_tokens
+
+def set_openai_api_key(api_key):
+    if api_key and api_key.startswith("sk-") and len(api_key) > 50:
+        openai.api_key = api_key
+
+def get_response_from_openai(input, chat_history, model_radio):
+    def openai_create(input_list, model_radio):
+        try:
+            # print(f'input_list={input_list}')
+            input_list_len = len(input_list)
+            out_prompt = ''
+            messages = []
+            if model_radio == 'GPT-3.0':
+                out_prompt = 'AI:'
+            for i in range(input_list_len):
+                input = input_list[input_list_len-i-1].replace("<br>", '\n\n')
+                if input.startswith("β:"):
+                    if model_radio == 'GPT-3.0':
+                        out_prompt = input.replace("β:", "AI:") + '\n' + out_prompt
+                    else:
+                        out_prompt = input.replace("β:", "") + out_prompt
+                    messages.insert(0, {"role": "assistant", "content": input.replace("β:", "")})
+                elif input.startswith("β:"):
+                    if model_radio == 'GPT-3.0':
+                        out_prompt = input.replace("β:", "Human:") + '\n' + out_prompt
+                    else:
+                        out_prompt = input.replace("β:", "") + out_prompt
+                    messages.insert(0, {"role": "user", "content": input.replace("β:", "")})
+                tokens = token_encoder.encode(out_prompt)
+                if len(tokens) > max_input_tokens:
+                    break
+
+            if model_radio == 'GPT-3.0':
+                # print(out_prompt)
+                response = openai.Completion.create(
+                    model="text-davinci-003",
+                    prompt=out_prompt,
+                    temperature=0.7,
+                    max_tokens=max_output_tokens,
+                    top_p=1,
+                    frequency_penalty=0,
+                    presence_penalty=0,
+                    stop=[" Human:", " AI:"]
+                )
+                # print(f'response_3.0__:{response}')
+                ret = response.choices[0].text
+            else:
+                # print(messages)
+                response = openai.ChatCompletion.create(
+                    model="gpt-3.5-turbo",
+                    messages=messages,
+                    temperature=0.7,
+                    max_tokens=max_output_tokens,
+                    top_p=1,
+                    frequency_penalty=0,
+                    presence_penalty=0,
+                    stop=[" Human:", " AI:"]
+                )
+                # print(f'response_3.5__:{response}')
+                ret = response.choices[0].message['content']
+                if ret.startswith("\n\n"):
+                    ret = ret.replace("\n\n", '')
+            ret = ret.replace('\n', '<br>')
+            if ret == '':
+                ret = f"Openai said: I'm too tired(1)."
+            return ret, response.usage
+        except Exception as e:
+            logger.info(f"openai_create_error__{e}")
+            ret = f"Openai said: I'm too tired(2)."
+            return ret, {"completion_tokens": -1, "prompt_tokens": -1, "total_tokens": -1}
+
+    print(f'chat_history = {chat_history}')
+    chat_history_list = []
+    chat_history = chat_history.replace("<p>", "").replace("</p>", "")
+    chat_history = chat_history.replace("Openai said: I'm too tired(1).", "")
+    chat_history = chat_history.replace("Openai said: I'm too tired(2).", "")
    if chat_history != '':
+        chat_history_list = json.loads(chat_history)
+    chat_history_list.append(f'β:{input}')
+
+    output, response_usage = openai_create(chat_history_list, model_radio)
+    print(f'response_usage={response_usage}')
+    return output
+
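Review note: openai_create shapes the same icon-prefixed history two ways: a single Human:/AI: prompt string for text-davinci-003 (GPT-3.0) and a messages list for gpt-3.5-turbo, both sent through the pre-1.0 openai SDK. A standalone sketch of that shaping step, using 'U:'/'A:' as stand-ins for the emoji prefixes used in app.py (which did not survive this page's encoding); no API call is made here:

# Sketch of the prompt/messages construction only; prefixes are hypothetical stand-ins.
def build_requests(history):
    prompt = 'AI:'            # completion-style prompt for text-davinci-003
    messages = []             # chat-style messages for gpt-3.5-turbo
    for turn in reversed(history):                 # newest turn first, like the app
        if turn.startswith('A:'):
            prompt = turn.replace('A:', 'AI:') + '\n' + prompt
            messages.insert(0, {"role": "assistant", "content": turn[2:]})
        elif turn.startswith('U:'):
            prompt = turn.replace('U:', 'Human:') + '\n' + prompt
            messages.insert(0, {"role": "user", "content": turn[2:]})
    return prompt, messages

prompt, messages = build_requests(['U:hi', 'A:hello!', 'U:what can you do?'])
# prompt   -> "Human:hi\nAI:hello!\nHuman:what can you do?\nAI:"
# messages -> [{'role': 'user', ...}, {'role': 'assistant', ...}, {'role': 'user', ...}]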
+def chat(input0, input1, chat_radio, model_radio, all_chat_history, chat_history):
+    all_chat = []
+    if all_chat_history != '':
+        all_chat = json.loads(all_chat_history)

+    if len(input0) == 0:
+        return all_chat, json.dumps(all_chat), input0, input1
+
    if chat_radio == "Talk to chatGPT":
+        response = get_response_from_openai(input0, chat_history, model_radio)
+        all_chat.append((input0, response))
+        return all_chat, json.dumps(all_chat), '', input1
    else:
        prompt_en = getTextTrans(input0, source='zh', target='en') + f',{random.randint(0,sys.maxsize)}'
+        return all_chat, json.dumps(all_chat), input0, prompt_en

+def chat_radio_change(chat_radio):
+    if chat_radio == "Talk to chatGPT":
+        return gr.Radio.update(visible=True), gr.Text.update(visible=True)
+    else:
+        return gr.Radio.update(visible=False), gr.Text.update(visible=False)
+
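Review note: chat() keeps the conversation as a list of (user, bot) pairs that is serialized to JSON into the hidden all_chat_history textbox and parsed back on the next submit. A minimal sketch of that round trip (no Gradio dependency needed):

# Sketch of the all_chat_history round trip used by chat() above.
import json

def chat_step(user_msg, bot_msg, all_chat_history=''):
    all_chat = json.loads(all_chat_history) if all_chat_history else []
    all_chat.append((user_msg, bot_msg))
    # first value would update the Chatbot; second is stored back into the hidden Textbox
    return all_chat, json.dumps(all_chat)

pairs, state = chat_step("hi", "hello!")
pairs, state = chat_step("how are you?", "fine, thanks", state)
print(pairs)  # [['hi', 'hello!'], ['how are you?', 'fine, thanks']]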
with gr.Blocks(title='Talk to chatGPT') as demo:
+    with gr.Row(elem_id="page_0", visible=False) as page_0:
        gr.HTML("<p>You can duplicating this space and use your own session token: <a style='display:inline-block' href='https://huggingface.co/spaces/yizhangliu/chatGPT?duplicate=true'><img src='https://img.shields.io/badge/-Duplicate%20Space-blue?labelColor=white&style=flat&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAP5JREFUOE+lk7FqAkEURY+ltunEgFXS2sZGIbXfEPdLlnxJyDdYB62sbbUKpLbVNhyYFzbrrA74YJlh9r059973psed0cvUD4A+4HoCjsA85X0Dfn/RBLBgBDxnQPfAEJgBY+A9gALA4tcbamSzS4xq4FOQAJgCDwV2CPKV8tZAJcAjMMkUe1vX+U+SMhfAJEHasQIWmXNN3abzDwHUrgcRGmYcgKe0bxrblHEB4E/pndMazNpSZGcsZdBlYJcEL9Afo75molJyM2FxmPgmgPqlWNLGfwZGG6UiyEvLzHYDmoPkDDiNm9JR9uboiONcBXrpY1qmgs21x1QwyZcpvxt9NS09PlsPAAAAAElFTkSuQmCC&logoWidth=14' alt='Duplicate Space'></a></p>")
    with gr.Group(elem_id="page_1", visible=True) as page_1:
        with gr.Box():
            with gr.Row():
                start_button = gr.Button("Let's talk to chatGPT!", elem_id="start-btn", visible=True)
                start_button.click(fn=None, inputs=[], outputs=[], _js=start_work)

+    with gr.Row(elem_id="page_2", visible=False) as page_2:
        with gr.Row(elem_id="chat_row"):
            chatbot = gr.Chatbot(elem_id="chat_bot", visible=False).style(color_map=("green", "blue"))
            chatbot1 = gr.Chatbot(elem_id="chat_bot1").style(color_map=("green", "blue"))
        with gr.Row(elem_id="prompt_row"):
+            prompt_input0 = gr.Textbox(lines=2, label="input", elem_id="my_prompt", show_label=True)
+            prompt_input1 = gr.Textbox(lines=4, label="prompt", elem_id="my_prompt_en", visible=False)
+            chat_history = gr.Textbox(lines=4, label="chat_history", elem_id="chat_history", visible=False)
+            all_chat_history = gr.Textbox(lines=4, label="会话上下文：", elem_id="all_chat_history", visible=False)
+
            chat_radio = gr.Radio(["Talk to chatGPT", "Text to Image"], elem_id="chat_radio",value="Talk to chatGPT", show_label=False, visible=True)
+            model_radio = gr.Radio(["GPT-3.0", "GPT-3.5"], elem_id="model_radio", value="GPT-3.5",
+                        label='GPT model: ', show_label=True,interactive=True, visible=True)
+            openai_api_key_textbox = gr.Textbox(placeholder="Paste your OpenAI API key (sk-...) and hit Enter",
+                        show_label=False, lines=1, type='password')
        with gr.Row(elem_id="btns_row"):
            with gr.Column(id="submit_col"):
                submit_btn = gr.Button(value = "submit",elem_id="submit-btn").style(

                    rounded=(True, True, True, True),
                    width=100
                )
        submit_btn.click(fn=chat,
+                     inputs=[prompt_input0, prompt_input1, chat_radio, model_radio, all_chat_history, chat_history],
+                     outputs=[chatbot, all_chat_history, prompt_input0, prompt_input1],
                     )
    with gr.Row(elem_id='tab_img', visible=False).style(height=5):
+        tab_img = gr.TabbedInterface(tab_actions, tab_titles)
+
+    openai_api_key_textbox.change(set_openai_api_key,
+                        inputs=[openai_api_key_textbox],
+                        outputs=[])
+    openai_api_key_textbox.submit(set_openai_api_key,
+                        inputs=[openai_api_key_textbox],
+                        outputs=[])
+    chat_radio.change(fn=chat_radio_change,
+                        inputs=[chat_radio],
+                        outputs=[model_radio, openai_api_key_textbox],
+                        )

+demo.launch(debug = True)
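Review note: set_openai_api_key only assigns openai.api_key when the value looks like a real OpenAI secret key, and both the change and submit events of openai_api_key_textbox are wired to it. A sketch of that format gate (format check only; it does not verify the key against the OpenAI API):

# Mirrors the acceptance condition used in set_openai_api_key above.
def looks_like_openai_key(api_key: str) -> bool:
    return bool(api_key) and api_key.startswith("sk-") and len(api_key) > 50

print(looks_like_openai_key("sk-" + "x" * 48))   # True  (plausible key format)
print(looks_like_openai_key("not-a-key"))        # False (ignored, key left unchanged)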