remove interface fn
Signed-off-by: peter szemraj <peterszemraj@gmail.com>
app.py
CHANGED
@@ -1,3 +1,5 @@
+import random
+
 import gradio as gr
 import torch
 from gradio.themes.utils import sizes
@@ -68,19 +70,6 @@ def run_inference(prompt, temperature, max_new_tokens, top_p, repetition_penalty
     return text
 
 
-# Gradio interface wrapper for inference
-def gradio_interface(
-    prompt: str,
-    temperature: float,
-    max_new_tokens: int,
-    top_p: float,
-    repetition_penalty: float,
-):
-    return run_inference(prompt, temperature, max_new_tokens, top_p, repetition_penalty)
-
-
-import random
-
 examples = [
     ["def add_numbers(a, b):\n    return", 0.2, 192, 0.9, 1.2],
     [
@@ -214,14 +203,14 @@ with gr.Blocks(theme=theme, analytics_enabled=False, css=_styles) as demo:
             version,
         ],
         cache_examples=False,
-        fn=gradio_interface,
+        fn=run_inference,
         outputs=[output],
     )
     gr.Markdown(base_model_info)
     gr.Markdown(formats)
 
     submit.click(
-        gradio_interface,
+        run_inference,
        inputs=[
            instruction,
            temperature,
@@ -238,7 +227,6 @@ with gr.Blocks(theme=theme, analytics_enabled=False, css=_styles) as demo:
 # .queue(max_size=10, api_open=False)
 demo.launch(
     debug=True,
-    server_port=DEFAULT_PORT,
     show_api=False,
     share=utils.is_google_colab(),
 )
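For reference, a minimal sketch of the pattern this commit moves to: the generation function is passed straight to gr.Examples and Button.click, so the pass-through gradio_interface() wrapper becomes unnecessary. The component names and the stubbed run_inference body below are illustrative assumptions, not the Space's actual code.

# Minimal sketch (illustrative only): wiring run_inference directly into
# gr.Examples and Button.click instead of going through a wrapper function.
import gradio as gr

def run_inference(prompt, temperature, max_new_tokens, top_p, repetition_penalty):
    # Stand-in for the real model call in app.py.
    return f"(generated continuation of: {prompt!r})"

with gr.Blocks() as demo:
    instruction = gr.Textbox(label="Prompt")
    temperature = gr.Slider(0.0, 1.0, value=0.2, label="Temperature")
    max_new_tokens = gr.Slider(32, 512, value=192, step=32, label="Max new tokens")
    top_p = gr.Slider(0.0, 1.0, value=0.9, label="Top-p")
    repetition_penalty = gr.Slider(1.0, 2.0, value=1.2, label="Repetition penalty")
    output = gr.Textbox(label="Output")
    submit = gr.Button("Generate")

    gr.Examples(
        examples=[["def add_numbers(a, b):\n    return", 0.2, 192, 0.9, 1.2]],
        inputs=[instruction, temperature, max_new_tokens, top_p, repetition_penalty],
        fn=run_inference,   # the function itself; no wrapper needed
        outputs=[output],
        cache_examples=False,
    )
    submit.click(
        run_inference,      # same callable wired to the button
        inputs=[instruction, temperature, max_new_tokens, top_p, repetition_penalty],
        outputs=[output],
    )

demo.launch()

Since the wrapper only forwarded its arguments unchanged, passing run_inference directly keeps the event wiring shorter and avoids maintaining a second signature in sync with the inference function.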