ttj committed on
Commit efc0714
1 Parent(s): 22ac6cb
Files changed (1)
  1. app.py +48 -1
app.py CHANGED
@@ -1,2 +1,49 @@
  import gradio as gr
- gr.Interface.load("huggingface/bigscience/T0").launch()
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
+
+
+ def get_pipe(name):
+     # Load tokenizer and seq2seq model, then wrap them in a text2text-generation pipeline.
+     tokenizer = AutoTokenizer.from_pretrained(name)
+     model = AutoModelForSeq2SeqLM.from_pretrained(name)
+     pipe = pipeline(
+         "text2text-generation", model=model, tokenizer=tokenizer, framework="pt"
+     )
+     return pipe
+
+
+ model_names = ['bigscience/T0_3B', 'bigscience/T0']  # , 'bigscience/T0p', 'bigscience/T0pp'
+ pipes = [get_pipe(name) for name in model_names]
+
+
+ def _fn(text, do_sample, min_length, max_length, temperature, top_p, pipe):
+     # Run a single model on the input text with the chosen generation settings.
+     out = pipe(
+         text,
+         do_sample=do_sample,
+         min_length=min_length,
+         max_length=max_length,
+         temperature=temperature,
+         top_p=top_p,
+         truncation=True,
+     )
+     return out[0]["generated_text"]
+
+
+ def fn(*args):
+     # Run every loaded model on the same input and return one output per model.
+     return [_fn(*args, pipe=pipe) for pipe in pipes]
+
+
  import gradio as gr
+
+ # Example prompts shown in the UI (hypothetical placeholder values; adapt as needed).
+ examples = [
+     "Is this review positive or negative? Review: this is the best cast iron skillet you will ever buy",
+ ]
+
+ interface = gr.Interface(
+     fn,
+     inputs=[
+         gr.inputs.Textbox(lines=10, label="input text"),
+         gr.inputs.Checkbox(label="do_sample", default=True),
+         gr.inputs.Slider(1, 128, step=1, default=64, label="min_length"),
+         gr.inputs.Slider(1, 128, step=1, default=64, label="max_length"),
+         gr.inputs.Slider(0.0, 1.0, step=0.1, default=1, label="temperature"),
+         gr.inputs.Slider(0.0, 1.0, step=0.1, default=1, label="top_p"),
+     ],
+     outputs=[
+         gr.outputs.Textbox(label=f"output by {name}") for name in model_names
+     ],
+     examples=[[ex] for ex in examples],
+     title="T0 playground",
+     description="""
+     This is a playground for playing around with T0 models.
+     See https://huggingface.co/bigscience/T0 for more details
+     """,
+ )
+ interface.launch()
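For reference, a minimal standalone sketch (not part of the commit) of the text2text-generation pipeline the new app.py builds on. It assumes transformers is installed locally and uses only the smaller bigscience/T0_3B checkpoint; "generated_text" is the output field returned by this pipeline type.

from transformers import pipeline

# Sketch: load the smaller T0 checkpoint and query it once.
# Assumes enough RAM/VRAM to hold the 3B-parameter model.
t0 = pipeline("text2text-generation", model="bigscience/T0_3B")
result = t0("Is this review positive or negative? Review: this is the best cast iron skillet you will ever buy")
print(result[0]["generated_text"])  # e.g. "Positive"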