davila7 committed on
Commit
d0ae07d
1 Parent(s): dc87dbb
Files changed (1)
  1. app.py +5 -5
app.py CHANGED
@@ -4,7 +4,7 @@ import torch
 import numpy as np
 from transformers import pipeline
 
-name_list = ['microsoft/biogpt', 'stanford-crfm/BioMedLM', 'facebook/galactica-1.3b']
+name_list = ['microsoft/biogpt', 'google/flan-ul2', 'facebook/galactica-1.3b']
 
 examples = [['COVID-19 is'],['A 65-year-old female patient with a past medical history of']]
 
@@ -12,7 +12,7 @@ print(f"Is CUDA available: {torch.cuda.is_available()}")
 print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
 
 pipe_biogpt = pipeline("text-generation", model="microsoft/BioGPT-Large", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
-pipe_biomedlm = pipeline("text-generation", model="stanford-crfm/BioMedLM", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
+pipe_flan_ul2 = pipeline("text-generation", model="google/flan-ul2", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
 pipe_galactica = pipeline("text-generation", model="facebook/galactica-1.3b", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
 
 title = "Compare generative biomedical LLMs!"
@@ -20,11 +20,11 @@ description = "**Disclaimer:** this demo was made for research purposes only and
 
 def inference(text):
     output_biogpt = pipe_biogpt(text, max_length=100)[0]["generated_text"]
-    output_biomedlm = pipe_biomedlm(text, max_length=100)[0]["generated_text"]
+    output_flanul2 = pipe_flan_ul2(text, max_length=100)[0]["generated_text"]
     output_galactica = pipe_galactica(text, max_length=100)[0]["generated_text"]
     return [
         output_biogpt,
-        output_biomedlm,
+        output_flanul2,
         output_galactica
     ]
 
@@ -33,7 +33,7 @@ io = gr.Interface(
     gr.Textbox(lines=3),
     outputs=[
         gr.Textbox(lines=3, label="BioGPT-Large"),
-        gr.Textbox(lines=3, label="BioMedLM (fka PubmedGPT)"),
+        gr.Textbox(lines=3, label="Flan UL2"),
         gr.Textbox(lines=3, label="Galactica 1.3B"),
     ],
     title=title,
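
A note for anyone reproducing this change: google/flan-ul2 is an encoder-decoder (T5-family) checkpoint, so the causal "text-generation" pipeline used for BioGPT and Galactica may fail to load it; in transformers, encoder-decoder models are served by the "text2text-generation" pipeline. A minimal sketch of that variant (an assumption about the intended usage, not part of this commit; it also presumes a GPU with enough memory for the roughly 20B-parameter model in bfloat16):

# Sketch: load FLAN-UL2 through the text2text-generation pipeline,
# since its encoder-decoder architecture is not supported by the
# causal text-generation pipeline used for the other two models.
import torch
from transformers import pipeline

pipe_flan_ul2 = pipeline(
    "text2text-generation",
    model="google/flan-ul2",
    device="cuda:0",
    model_kwargs={"torch_dtype": torch.bfloat16},
)

# Same call shape as the other pipelines; the result list also uses
# the "generated_text" key, so inference() needs no further changes.
print(pipe_flan_ul2("COVID-19 is", max_length=100)[0]["generated_text"])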