Update app.py
app.py CHANGED
@@ -0,0 +1,35 @@
import os
import gradio as gr
import torch
from transformers import pipeline

print(f"Is CUDA available: {torch.cuda.is_available()}")
# Only query the device name when a GPU is actually present; otherwise this call raises an error.
if torch.cuda.is_available():
    print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")

examples = [['question: Should chest wall irradiation be included after mastectomy and negative node breast cancer? context: This study aims to evaluate local failure patterns in node negative breast cancer patients treated with post-mastectomy radiotherapy including internal mammary chain only. Retrospective analysis of 92 internal or central-breast node-negative tumours with mastectomy and external irradiation of the internal mammary chain at the dose of 50 Gy, from 1994 to 1998. Local recurrence rate was 5 % (five cases). Recurrence sites were the operative scare and chest wall. Factors associated with increased risk of local failure were age<or = 40 years and tumour size greater than 20mm, without statistical significance. answer: Post-mastectomy radiotherapy should be discussed for a sub-group of node-negative patients with predictors factors of local failure such as age<or = 40 years and larger tumour size.']]

# Use the GPU when present; fall back to CPU (device=-1) so the app still starts without CUDA.
device = 0 if torch.cuda.is_available() else -1
pipe_biogpt = pipeline("text-generation", model="microsoft/biogpt-large-pubmedqa", device=device)

title = "BioGPT Q&A Demo"
description = """
Check out the [BioGPT-Large-PubMedQA model card](https://huggingface.co/microsoft/biogpt-large-pubmedqa) for more info.
**Disclaimer:** this demo was made for research purposes only and should not be used for medical purposes.
"""

def inference(text):
    output_biogpt = pipe_biogpt(text, max_length=100)[0]["generated_text"]
    return output_biogpt

io = gr.Interface(
    inference,
    gr.Textbox(lines=3),
    outputs=[
        gr.Textbox(lines=3, label="BioGPT-Large"),
    ],
    title=title,
    description=description,
    examples=examples
)
io.launch()
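
For a quick check of the model outside the Gradio UI, a minimal sketch along the following lines mirrors what inference() does (assuming torch and transformers are installed and the checkpoint can be downloaded; the prompt shown is an abbreviated placeholder in the same question/context/answer format as the examples entry above):

import torch
from transformers import pipeline

# Use the GPU when available, otherwise fall back to CPU (slower but functional).
device = 0 if torch.cuda.is_available() else -1
pipe = pipeline("text-generation", model="microsoft/biogpt-large-pubmedqa", device=device)

# Abbreviated PubMedQA-style prompt; a real query would include the full context passage.
prompt = "question: Should chest wall irradiation be included after mastectomy and negative node breast cancer? context: ... answer:"
print(pipe(prompt, max_length=100)[0]["generated_text"])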