seawolf2357 committed
Commit 2a7ea2f · verified · 1 Parent(s): 49bf4d1

Update app.py

Files changed (1)
  1. app.py +53 -51
app.py CHANGED
@@ -3,48 +3,9 @@ import gradio as gr
 
 client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
 
-def format_prompt(message, history):
-    prompt = " "
-    for user_prompt, bot_response in history:
-        prompt += f"[USER] {user_prompt} [/ASSISTANT] {bot_response}["
-    prompt += f"[USER] {message} ["
-    return prompt
-
-def generate(
-    prompt, history, temperature=0.1, max_new_tokens=30000, top_p=0.95, repetition_penalty=1.0,
-):
-    temperature = float(temperature)
-    if temperature < 1e-2:
-        temperature = 1e-2
-    top_p = float(top_p)
-
-    generate_kwargs = dict(
-        temperature=temperature,
-        max_new_tokens=max_new_tokens,
-        top_p=top_p,
-        repetition_penalty=repetition_penalty,
-        do_sample=True,
-        seed=42,
-    )
-
-    formatted_prompt = format_prompt(prompt, history)
-
-    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
-    output = ""
-
-    for response in stream:
-        output += response.token.text
-        yield output
-    return output
-
-mychatbot = gr.Chatbot(
-    avatar_images=["./user.png", "./botm.png"],
-    bubble_full_width=False,
-    show_label=False,
-    show_copy_button=True,
-    likeable=True,
-    system_msg= """
-Your name is 'AIQ Codepilot'. You act as a professional AI assistant specialized in Gradio coding on Huggingface.
+# Set the system instruction, but do not expose it to the user.
+system_instruction = """
+Your name is 'AIQ Codepilot'. You act as a professional AI assistant specialized in Gradio coding on Huggingface. Answer everything in Korean, and when outputting code, use markdown format with a black background.
 Unless requested otherwise, always output all code as "huggingface gradio" code.
 Remember the conversation, put no limit on code length, and keep answering in Korean in as much detail as possible.
 Provide specialized knowledge and information about Huggingface models, datasets, and Spaces, and support full-text search.
@@ -110,17 +71,58 @@ const feedbackLink = `https://promptspellsmith.com/feedback`
 - /fix(any: string):
 // When a user asks to fix their code, engage in a Rubber Duck Debugging approach. This involves the user explaining their code and its purpose in detail, as if to a rubber duck, which helps in identifying logical errors or misconceptions.
 // You will analyze the code, ensuring it fulfills the specified functionality and is free of bugs. In cases of bugs or errors, guide the user step-by-step through the debugging process, leveraging the principles of Rubber Duck Debugging.
-// Think logically and methodically, asking probing questions to encourage the user to articulate their thought process and reasoning. This approach not only helps.
-
+// Think logically and methodically, asking probing questions to encourage the user to articulate their thought process and reasoning. This approach not only helps
 """
 
+def format_prompt(message, history):
+    prompt = "<s>"
+    # Include the system instruction in the prompt, but do not show it to the user.
+    prompt += f"[SYSTEM] {system_instruction} [/SYSTEM]"
+    for user_prompt, bot_response in history:
+        prompt += f"[INST] {user_prompt} [/INST]"
+        prompt += f" {bot_response}</s> "
+    prompt += f"[INST] {message} [/INST]"
+    return prompt
+
+def generate(prompt, history, temperature=0.1, max_new_tokens=30000, top_p=0.95, repetition_penalty=1.0):
+    temperature = float(temperature)
+    if temperature < 1e-2:
+        temperature = 1e-2
+    top_p = float(top_p)
+
+    generate_kwargs = dict(
+        temperature=temperature,
+        max_new_tokens=max_new_tokens,
+        top_p=top_p,
+        repetition_penalty=repetition_penalty,
+        do_sample=True,
+        seed=42,
+    )
+
+    formatted_prompt = format_prompt(prompt, history)
+
+    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
+    output = ""
+
+    for response in stream:
+        output += response.token.text
+        yield output
+    return output
+
+mychatbot = gr.Chatbot(
+    avatar_images=["./user.png", "./botm.png"],
+    bubble_full_width=False,
+    show_label=False,
+    show_copy_button=True,
+    likeable=True,
 )
 
-demo = gr.ChatInterface(fn=generate,
-    chatbot=mychatbot,
-    title="Mixtral 8x7b Chatbot",
-    retry_btn=None,
-    undo_btn=None
-)
+demo = gr.ChatInterface(
+    fn=generate,
+    chatbot=mychatbot,
+    title="Mixtral 8x7b Chat",
+    retry_btn=None,
+    undo_btn=None
+)
 
-demo.queue().launch(show_api=False)
+demo.queue().launch(show_api=False)
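For reference, a minimal sketch (not part of the commit) of the string the new format_prompt builds for a one-turn history; the shortened system_instruction and the messages below are made-up stand-ins:

# Stand-in values for illustration only.
system_instruction = "You are AIQ Codepilot."
history = [("Hi", "Hello! How can I help?")]
message = "Show me a Gradio example."

prompt = "<s>"
prompt += f"[SYSTEM] {system_instruction} [/SYSTEM]"
for user_prompt, bot_response in history:
    prompt += f"[INST] {user_prompt} [/INST]"
    prompt += f" {bot_response}</s> "
prompt += f"[INST] {message} [/INST]"
print(prompt)
# <s>[SYSTEM] You are AIQ Codepilot. [/SYSTEM][INST] Hi [/INST] Hello! How can I help?</s> [INST] Show me a Gradio example. [/INST]

Note that [SYSTEM]/[/SYSTEM] are markers this app defines for itself, while [INST]/[/INST] and </s> follow the delimiter convention of the Mixtral instruct template.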
 
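generate is a generator: each yield is the full text produced so far, which is what lets gr.ChatInterface stream partial responses into the chat window. A rough smoke test outside Gradio, assuming the Space's file is importable as app and that Inference API access to the Mixtral model is configured:

# Hypothetical local test; requires network access and HF Inference API permissions.
from app import generate

last = ""
for partial in generate("Write a minimal Gradio hello-world app.", history=[]):
    last = partial  # each yield is the cumulative output so far
print(last)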