Update app.py
app.py CHANGED

@@ -1,15 +1,13 @@
-import os
 import gradio as gr
-import torch
-import numpy as np
-from transformers import pipeline
+import requests

-
-
-
+UL2_API_URL = "https://api-inference.huggingface.co/models/google/flan-ul2"
+FLAN_API_URL = "https://api-inference.huggingface.co/models/google/flan-t5-xxl"
+
+def query(payload, api_url):
+    response = requests.request("POST", api_url, json={"inputs":payload})
+    return response.json()

-pipe_flan = pipeline("text2text-generation", model="google/flan-ul2", model_kwargs={"load_in_8bit":True, "device_map": "auto"})
-pipe_vanilla = pipeline("text2text-generation", model="t5-large", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})

 examples = [
     ["Please answer to the following question. Who is going to be the next Ballon d'or?"],
@@ -27,20 +25,20 @@ A: Roger started with 5 balls. 2 cans of 3 tennis balls each is 6 tennis balls.
 Q: A juggler can juggle 16 balls. Half of the balls are golf balls, and half of the golf balls are blue. How many blue golf balls are there?"""]
 ]

-title = "Flan UL2
-description = "This demo compares [T5-
+title = "Flan UL2 vs Flan T5 XXL"
+description = "This demo compares [Flan-T5-xxl](https://huggingface.co/google/flan-t5-xxl) and [Flan-UL2](https://huggingface.co/google/flan-ul2). Learn more about these models in their model card!"

 def inference(text):
-
-
-
+    output_ul2 = query(text, api_url=UL2_API_URL)[0]["generated_text"]
+    output_flan = query(text, api_url=FLAN_API_URL)[0]["generated_text"]
+    return [output_ul2, output_flan]

 io = gr.Interface(
     inference,
     gr.Textbox(lines=3),
     outputs=[
-        gr.Textbox(lines=3, label="Flan UL2"),
-        gr.Textbox(lines=3, label="T5
+        gr.Textbox(lines=3, label="Flan T5-UL2"),
+        gr.Textbox(lines=3, label="Flan T5-XXL")
     ],
     title=title,
     description=description,
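The new app.py calls the hosted Inference API anonymously, so requests are subject to the public rate limits and to "model is currently loading" responses while the endpoints warm up. Below is a minimal sketch of a hardened query helper, assuming a Hugging Face access token exported as HF_TOKEN; the token variable, retry count, and error handling are illustrative additions, not part of this commit.

import os
import time

import requests

UL2_API_URL = "https://api-inference.huggingface.co/models/google/flan-ul2"

def query_with_token(payload, api_url, retries=3):
    # Assumption: a Hugging Face access token is exported as HF_TOKEN. The
    # Inference API accepts it as a Bearer token, which lifts the anonymous
    # rate limits the Space currently relies on.
    headers = {"Authorization": f"Bearer {os.environ['HF_TOKEN']}"}
    data = None
    for _ in range(retries):
        response = requests.post(api_url, headers=headers, json={"inputs": payload})
        data = response.json()
        # While the model is cold the API answers with an error dict that
        # carries an "estimated_time" field instead of a list of generations;
        # wait for that long and retry.
        if isinstance(data, dict) and "estimated_time" in data:
            time.sleep(data["estimated_time"])
            continue
        if isinstance(data, dict) and "error" in data:
            raise RuntimeError(f"Inference API error: {data['error']}")
        return data
    raise RuntimeError(f"Model still loading after {retries} attempts: {data}")

# Example, mirroring what inference() does for each output box:
# text = "Please answer to the following question. Who is going to be the next Ballon d'or?"
# print(query_with_token(text, UL2_API_URL)[0]["generated_text"])

Swapping this helper in for query() would leave inference() unchanged apart from the function name.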