Spaces: huggingface-projects/codellama-2-13b-chat (Runtime error)

Update app.py

app.py CHANGED
@@ -27,10 +27,6 @@ As a derivate work of Code Llama by Meta,
 this demo is governed by the original [license](https://huggingface.co/spaces/huggingface-projects/codellama-2-13b-chat/blob/main/LICENSE.txt) and [acceptable use policy](https://huggingface.co/spaces/huggingface-projects/codellama-2-13b-chat/blob/main/USE_POLICY.md).
 """

-if not torch.cuda.is_available():
-    DESCRIPTION += '\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>'
-
-
 def clear_and_save_textbox(message: str) -> tuple[str, str]:
     return '', message

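Note on the first hunk: the four removed lines were the only CPU handling in the file; when CUDA was unavailable they appended an HTML notice to DESCRIPTION telling visitors the demo does not work on CPU, and with them gone the app simply assumes a GPU. A minimal, self-contained sketch of that removed guard is below; the DESCRIPTION value is a placeholder, and the commented-out fail-fast variant is an assumption, not anything in this commit.

import torch

DESCRIPTION = '# Code Llama 13B Chat'

# The guard removed by this commit: on CPU-only hardware, surface a notice
# in the page description rather than failing silently.
if not torch.cuda.is_available():
    DESCRIPTION += '\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>'
    # A stricter alternative (assumption, not part of the original app):
    # raise RuntimeError('This demo requires a GPU.')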
@@ -104,58 +100,7 @@ with gr.Blocks(css='style.css') as demo:
                                       variant='primary',
                                       scale=1,
                                       min_width=0)
-
-        retry_button = gr.Button('🔄 Retry', variant='secondary')
-        undo_button = gr.Button('↩️ Undo', variant='secondary')
-        clear_button = gr.Button('🗑️ Clear', variant='secondary')
-
-        saved_input = gr.State()
-
-        with gr.Accordion(label='Advanced options', open=False):
-            system_prompt = gr.Textbox(label='System prompt',
-                                       value=DEFAULT_SYSTEM_PROMPT,
-                                       lines=6)
-            max_new_tokens = gr.Slider(
-                label='Max new tokens',
-                minimum=1,
-                maximum=MAX_MAX_NEW_TOKENS,
-                step=1,
-                value=DEFAULT_MAX_NEW_TOKENS,
-            )
-            temperature = gr.Slider(
-                label='Temperature',
-                minimum=0.1,
-                maximum=4.0,
-                step=0.1,
-                value=0.1,
-            )
-            top_p = gr.Slider(
-                label='Top-p (nucleus sampling)',
-                minimum=0.05,
-                maximum=1.0,
-                step=0.05,
-                value=0.9,
-            )
-            top_k = gr.Slider(
-                label='Top-k',
-                minimum=1,
-                maximum=1000,
-                step=1,
-                value=10,
-            )
-
-    gr.Examples(
-        examples=[
-            'What is the Fibonacci sequence?',
-            'Can you explain briefly what Python is good for?',
-            'How can I display a grid of images in SwiftUI?',
-        ],
-        inputs=textbox,
-        outputs=[textbox, chatbot],
-        fn=process_example,
-        cache_examples=True,
-    )
-
+
     gr.Markdown(LICENSE)

     textbox.submit(
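Note on the second hunk: it strips the Retry/Undo/Clear buttons, the saved_input state, the whole "Advanced options" accordion (system prompt, max new tokens, temperature, top-p, top-k), and the gr.Examples block. That examples block cached its prompts by running them through process_example, a helper that is not shown in this diff; the sketch below gives one shape that is consistent with inputs=textbox and outputs=[textbox, chatbot], with the model call stubbed out, so both the stub and the body are assumptions rather than the app's actual code.

from collections.abc import Iterator

def generate_stub(message: str,
                  chat_history: list[tuple[str, str]],
                  system_prompt: str) -> Iterator[list[tuple[str, str]]]:
    # Stand-in for the app's streaming `generate` function (not in this diff):
    # yield the chat history extended with the reply to the new message.
    yield chat_history + [(message, f'(model reply to: {message!r})')]

def process_example(message: str) -> tuple[str, list[tuple[str, str]]]:
    # Consistent with gr.Examples(inputs=textbox, outputs=[textbox, chatbot]):
    # run the example to completion, clear the textbox, return the history,
    # so cache_examples=True has concrete outputs to store.
    history: list[tuple[str, str]] = []
    for history in generate_stub(message, [], ''):
        pass
    return '', history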
@@ -221,53 +166,3 @@ with gr.Blocks(css='style.css') as demo:
         outputs=chatbot,
         api_name=False,
     )
-
-    retry_button.click(
-        fn=delete_prev_fn,
-        inputs=chatbot,
-        outputs=[chatbot, saved_input],
-        api_name=False,
-        queue=False,
-    ).then(
-        fn=display_input,
-        inputs=[saved_input, chatbot],
-        outputs=chatbot,
-        api_name=False,
-        queue=False,
-    ).then(
-        fn=generate,
-        inputs=[
-            saved_input,
-            chatbot,
-            system_prompt,
-            max_new_tokens,
-            temperature,
-            top_p,
-            top_k,
-        ],
-        outputs=chatbot,
-        api_name=False,
-    )
-
-    undo_button.click(
-        fn=delete_prev_fn,
-        inputs=chatbot,
-        outputs=[chatbot, saved_input],
-        api_name=False,
-        queue=False,
-    ).then(
-        fn=lambda x: x,
-        inputs=[saved_input],
-        outputs=textbox,
-        api_name=False,
-        queue=False,
-    )
-
-    clear_button.click(
-        fn=lambda: ([], ''),
-        outputs=[chatbot, saved_input],
-        queue=False,
-        api_name=False,
-    )
-
-demo.queue(max_size=20).launch()
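Note on the last hunk: it removes the Retry, Undo and Clear handlers together with the final demo.queue(max_size=20).launch() line. Two things are worth noting. First, the handlers relied on Gradio's event chaining: .click() returns a dependency whose .then() runs the next step with the previous step's outputs already applied, which is how retry could pop the last exchange, re-display the saved message, and regenerate in sequence. Second, unless an equivalent launch call exists elsewhere in the file (this diff does not show one), removing demo.queue(...).launch() leaves nothing to start the server, which would be consistent with the Space's Runtime error badge, though the diff alone cannot confirm the cause. Below is a self-contained sketch of the same chaining pattern with simplified stand-ins for delete_prev_fn, display_input, and generate; the stand-in names and bodies are assumptions.

import gradio as gr

def delete_prev(history: list[tuple[str, str]]) -> tuple[list[tuple[str, str]], str]:
    # Drop the last exchange and hand its user message back through gr.State,
    # mirroring what the removed delete_prev_fn appears to have done.
    if history:
        message, _ = history.pop()
        return history, message or ''
    return history, ''

def redisplay(message: str, history: list[tuple[str, str]]) -> list[tuple[str, str]]:
    # Put the saved user message back into the chat with an empty bot slot.
    return history + [(message, '')]

def regenerate(message: str, history: list[tuple[str, str]]) -> list[tuple[str, str]]:
    # Stand-in for the app's streaming generate function.
    history[-1] = (message, f'(regenerated reply to: {message!r})')
    return history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label='Chatbot')
    saved_input = gr.State('')
    retry_button = gr.Button('🔄 Retry', variant='secondary')

    # Each .then() runs after the previous step finishes, mirroring the
    # removed retry_button.click(...).then(...).then(...) chain.
    retry_button.click(
        fn=delete_prev,
        inputs=chatbot,
        outputs=[chatbot, saved_input],
        queue=False,
    ).then(
        fn=redisplay,
        inputs=[saved_input, chatbot],
        outputs=chatbot,
        queue=False,
    ).then(
        fn=regenerate,
        inputs=[saved_input, chatbot],
        outputs=chatbot,
    )

if __name__ == '__main__':
    # The removed final line of app.py: queue requests and start the server.
    demo.queue(max_size=20).launch()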