davila7 committed on
Commit
3c355ba
1 Parent(s): d0ae07d

flan t5 vs gpt2

Browse files
Files changed (1) hide show
  1. app.py +10 -10
app.py CHANGED
@@ -12,20 +12,20 @@ print(f"Is CUDA available: {torch.cuda.is_available()}")
12
  print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
13
 
14
  pipe_biogpt = pipeline("text-generation", model="microsoft/BioGPT-Large", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
15
- pipe_flan_ul2 = pipeline("text-generation", model="google/flan-ul2", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
16
- pipe_galactica = pipeline("text-generation", model="facebook/galactica-1.3b", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
17
 
18
- title = "Compare generative biomedical LLMs!"
19
- description = "**Disclaimer:** this demo was made for research purposes only and should not be used for medical purposes."
20
 
21
  def inference(text):
22
  output_biogpt = pipe_biogpt(text, max_length=100)[0]["generated_text"]
23
- output_flanul2 = pipe_flan_ul2(text, max_length=100)[0]["generated_text"]
24
- output_galactica = pipe_galactica(text, max_length=100)[0]["generated_text"]
25
  return [
26
  output_biogpt,
27
- output_flanul2,
28
- output_galactica
29
  ]
30
 
31
  io = gr.Interface(
@@ -33,8 +33,8 @@ io = gr.Interface(
33
  gr.Textbox(lines=3),
34
  outputs=[
35
  gr.Textbox(lines=3, label="BioGPT-Large"),
36
- gr.Textbox(lines=3, label="Flan UL2"),
37
- gr.Textbox(lines=3, label="Galactica 1.3B"),
38
  ],
39
  title=title,
40
  description=description,
 
12
  print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
13
 
14
  pipe_biogpt = pipeline("text-generation", model="microsoft/BioGPT-Large", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
15
+ pipe_flan_t5_xxl = pipeline("text-generation", model="google/flan-t5-xxl", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
16
+ pipe_gpt_2 = pipeline("text-generation", model="gpt2", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
17
 
18
+ title = "LLM vs LLM!"
19
+ description = "**Disclaimer:** this demo was made for research purposes only."
20
 
21
  def inference(text):
22
  output_biogpt = pipe_biogpt(text, max_length=100)[0]["generated_text"]
23
+ output_flan_t5_xxl = pipe_flan_t5_xxl(text, max_length=100)[0]["generated_text"]
24
+ output_gpt_2 = pipe_gpt_2(text, max_length=100)[0]["generated_text"]
25
  return [
26
  output_biogpt,
27
+ output_flan_t5_xxl,
28
+ output_gpt_2
29
  ]
30
 
31
  io = gr.Interface(
 
33
  gr.Textbox(lines=3),
34
  outputs=[
35
  gr.Textbox(lines=3, label="BioGPT-Large"),
36
+ gr.Textbox(lines=3, label="Flan T5 XXL"),
37
+ gr.Textbox(lines=3, label="GPT-2"),
38
  ],
39
  title=title,
40
  description=description,