Spaces:
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -3,48 +3,9 @@ import gradio as gr
 
 client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
 
-
-
-
-        prompt += f"[USER] {user_prompt} [/ASSISTANT] {bot_response}["
-    prompt += f"[USER] {message} ["
-    return prompt
-
-def generate(
-    prompt, history, temperature=0.1, max_new_tokens=30000, top_p=0.95, repetition_penalty=1.0,
-):
-    temperature = float(temperature)
-    if temperature < 1e-2:
-        temperature = 1e-2
-    top_p = float(top_p)
-
-    generate_kwargs = dict(
-        temperature=temperature,
-        max_new_tokens=max_new_tokens,
-        top_p=top_p,
-        repetition_penalty=repetition_penalty,
-        do_sample=True,
-        seed=42,
-    )
-
-    formatted_prompt = format_prompt(prompt, history)
-
-    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
-    output = ""
-
-    for response in stream:
-        output += response.token.text
-        yield output
-    return output
-
-mychatbot = gr.Chatbot(
-    avatar_images=["./user.png", "./botm.png"],
-    bubble_full_width=False,
-    show_label=False,
-    show_copy_button=True,
-    likeable=True,
-    system_msg= """
-    Your name is 'AIQ Codepilot'. You act as an expert AI assistant specialized in gradio coding on Huggingface.
+# Set the system instruction, but do not expose it to the user.
+system_instruction = """
+Your name is 'AIQ Codepilot'. You act as an expert AI assistant specialized in gradio coding on Huggingface. Answer everything in Korean, and when outputting code, use markdown format with a black background.
 Unless otherwise requested, always output all code as "huggingface gradio" code.
 Remember the conversation, place no limit on code length, and keep answering in Korean in as much detail as possible.
 Provide specialized knowledge and information about Huggingface models, datasets, and spaces, and support full-text search.
@@ -110,17 +71,58 @@ const feedbackLink = `https://promptspellsmith.com/feedback`
 - /fix(any: string):
 // When a user asks to fix their code, engage in a Rubber Duck Debugging approach. This involves the user explaining their code and its purpose in detail, as if to a rubber duck, which helps in identifying logical errors or misconceptions.
 // You will analyze the code, ensuring it fulfills the specified functionality and is free of bugs. In cases of bugs or errors, guide the user step-by-step through the debugging process, leveraging the principles of Rubber Duck Debugging.
-// Think logically and methodically, asking probing questions to encourage the user to articulate their thought process and reasoning. This approach not only helps
-
+// Think logically and methodically, asking probing questions to encourage the user to articulate their thought process and reasoning. This approach not only helps
 """
 
+def format_prompt(message, history):
+    prompt = "<s>"
+    # Include the system instruction in the prompt, but do not display it to the user.
+    prompt += f"[SYSTEM] {system_instruction} [/SYSTEM]"
+    for user_prompt, bot_response in history:
+        prompt += f"[INST] {user_prompt} [/INST]"
+        prompt += f" {bot_response}</s> "
+    prompt += f"[INST] {message} [/INST]"
+    return prompt
+
+def generate(prompt, history, temperature=0.1, max_new_tokens=30000, top_p=0.95, repetition_penalty=1.0):
+    temperature = float(temperature)
+    if temperature < 1e-2:
+        temperature = 1e-2
+    top_p = float(top_p)
+
+    generate_kwargs = dict(
+        temperature=temperature,
+        max_new_tokens=max_new_tokens,
+        top_p=top_p,
+        repetition_penalty=repetition_penalty,
+        do_sample=True,
+        seed=42,
+    )
+
+    formatted_prompt = format_prompt(prompt, history)
+
+    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
+    output = ""
+
+    for response in stream:
+        output += response.token.text
+        yield output
+    return output
+
+mychatbot = gr.Chatbot(
+    avatar_images=["./user.png", "./botm.png"],
+    bubble_full_width=False,
+    show_label=False,
+    show_copy_button=True,
+    likeable=True,
 )
 
-demo = gr.ChatInterface(
-
-
-
-
-
+demo = gr.ChatInterface(
+    fn=generate,
+    chatbot=mychatbot,
+    title="Mixtral 8x7b Chat",
+    retry_btn=None,
+    undo_btn=None
+)
 
-demo.queue().launch(show_api=False)
+demo.queue().launch(show_api=False)
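
A note on why the new generate can simply yield partial strings: gr.ChatInterface treats a generator fn as a streaming handler and re-renders the pending bot message on every yield. A minimal sketch, assuming gradio 4.x; echo_stream is a hypothetical stand-in, not part of this Space:

# Minimal streaming sketch (hypothetical demo, not part of this Space):
# ChatInterface re-renders the last bot message on every yield, so yielding
# progressively longer prefixes produces the token-by-token streaming effect.
import time
import gradio as gr

def echo_stream(message, history):
    out = ""
    for ch in message:
        out += ch
        time.sleep(0.02)  # simulate token latency
        yield out         # each yield replaces the bot message with this prefix

gr.ChatInterface(fn=echo_stream).queue().launch()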
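
For reference, this is the prompt string the new format_prompt assembles, shown standalone with a placeholder system_instruction. The [SYSTEM] ... [/SYSTEM] block is this Space's own convention; Mixtral-8x7B-Instruct itself is trained on the <s>[INST] ... [/INST] template, which is why the commit swaps out the old [USER]/[/ASSISTANT] tags:

# Standalone check of the new format_prompt (placeholder system_instruction):
system_instruction = "You are AIQ Codepilot."

def format_prompt(message, history):
    prompt = "<s>"
    prompt += f"[SYSTEM] {system_instruction} [/SYSTEM]"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt

print(format_prompt("And with a slider?", [("Make a gradio button", "import gradio as gr ...")]))
# <s>[SYSTEM] You are AIQ Codepilot. [/SYSTEM][INST] Make a gradio button [/INST] import gradio as gr ...</s> [INST] And with a slider? [/INST]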