davila7 committed on
Commit
ae59d89
1 Parent(s): 3c355ba
Files changed (1) hide show
  1. app.py +5 -9
app.py CHANGED
@@ -4,7 +4,7 @@ import torch
4
  import numpy as np
5
  from transformers import pipeline
6
 
7
- name_list = ['microsoft/biogpt', 'google/flan-ul2', 'facebook/galactica-1.3b']
8
 
9
  examples = [['COVID-19 is'],['A 65-year-old female patient with a past medical history of']]
10
 
@@ -12,20 +12,17 @@ print(f"Is CUDA available: {torch.cuda.is_available()}")
12
  print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
13
 
14
  pipe_biogpt = pipeline("text-generation", model="microsoft/BioGPT-Large", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
15
- pipe_flan_t5_xxl = pipeline("text-generation", model="google/flan-t5-xxl", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
16
- pipe_gpt_2 = pipeline("text-generation", model="gpt2", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
17
 
18
  title = "LLM vs LLM!"
19
  description = "**Disclaimer:** this demo was made for research purposes only."
20
 
21
  def inference(text):
22
  output_biogpt = pipe_biogpt(text, max_length=100)[0]["generated_text"]
23
- output_flan_t5_xxl = pipe_flan_t5_xxl(text, max_length=100)[0]["generated_text"]
24
- output_gpt_2 = pipe_gpt_2(text, max_length=100)[0]["generated_text"]
25
  return [
26
  output_biogpt,
27
- output_flan_t5_xxl,
28
- output_gpt_2
29
  ]
30
 
31
  io = gr.Interface(
@@ -33,8 +30,7 @@ io = gr.Interface(
33
  gr.Textbox(lines=3),
34
  outputs=[
35
  gr.Textbox(lines=3, label="BioGPT-Large"),
36
- gr.Textbox(lines=3, label="Flan T5 XXL"),
37
- gr.Textbox(lines=3, label="GPT-2"),
38
  ],
39
  title=title,
40
  description=description,
 
4
  import numpy as np
5
  from transformers import pipeline
6
 
7
+ name_list = ['microsoft/biogpt', 'google/flan-ul2']
8
 
9
  examples = [['COVID-19 is'],['A 65-year-old female patient with a past medical history of']]
10
 
 
12
  print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
13
 
14
  pipe_biogpt = pipeline("text-generation", model="microsoft/BioGPT-Large", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
15
+ pipe_flan_ul2 = pipeline("text-generation", model="google/flan-ul2", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
 
16
 
17
  title = "LLM vs LLM!"
18
  description = "**Disclaimer:** this demo was made for research purposes only."
19
 
20
  def inference(text):
21
  output_biogpt = pipe_biogpt(text, max_length=100)[0]["generated_text"]
22
+ output_flan_ul2 = pipe_flan_ul2(text, max_length=100)[0]["generated_text"]
 
23
  return [
24
  output_biogpt,
25
+ output_flan_ul2
 
26
  ]
27
 
28
  io = gr.Interface(
 
30
  gr.Textbox(lines=3),
31
  outputs=[
32
  gr.Textbox(lines=3, label="BioGPT-Large"),
33
+ gr.Textbox(lines=3, label="Flan UL2"),
 
34
  ],
35
  title=title,
36
  description=description,