Spaces:
Runtime error
Runtime error
first commit
Browse files- __pycache__/model.cpython-37.pyc +0 -0
- app.py +17 -0
- app_stream.py +14 -0
- model.pth +3 -0
- model.py +5 -4
- requirements.txt +6 -0
__pycache__/model.cpython-37.pyc
ADDED
Binary file (6.46 kB). View file
|
|
app.py
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import gradio as gr

import model

# model.get_predictions is declared as get_predictions(feature_text, pn_history),
# so the feature-text box must be wired FIRST. The original passed
# [patient_history, feature_text], silently swapping the two arguments.
input_1 = gr.inputs.Textbox(lines=1, placeholder='Feature Text', default="", label=None, optional=False)
input_2 = gr.inputs.Textbox(lines=1, placeholder='Patient History', default="", label=None, optional=False)

# BUG FIX: the original passed `self` as the first positional argument, but
# `self` is undefined at module level -> NameError on import (the Space's
# "Runtime error").
output_1 = gr.outputs.Textbox(type="auto", label=None)

# BUG FIX: keep the Interface instance and launch it. The original discarded
# the gr.Interface(...) return value and called gr.launch(), which does not
# exist on the gradio module.
iface = gr.Interface(
    model.get_predictions,
    inputs=[input_1, input_2],
    outputs=[output_1],
    title='Identify Key Phrases in Patient Notes from Medical Licensing Exams',
    theme='dark',
)
iface.launch()
|
app_stream.py
ADDED
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import streamlit as st

import model

st.title('Identify Key Phrases in Patient Notes from Medical Licensing Exams')

pn_history = st.text_area("Patient History")
feature_text = st.text_input('Feature Text')

label = 'submit'
if st.button(label):
    # Only run the model when both fields are non-empty. (The original's
    # `pn_history and feature_text != ''` happens to behave the same for
    # strings, but the intent is clearer spelled out.)
    if pn_history != '' and feature_text != '':
        # BUG FIX: model.get_predictions is declared as
        # get_predictions(feature_text, pn_history); the original call passed
        # the two arguments in the opposite order, so every prediction was
        # computed with the note and the feature swapped.
        pred = model.get_predictions(feature_text, pn_history)
        st.write(pred)
|
model.pth
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:b6cd45fa27f18598291557baea3fcafe14c0132bb506c4e2e23b294779daa5a5
|
3 |
+
size 498669869
|
model.py
CHANGED
@@ -6,7 +6,7 @@ from tqdm.notebook import tqdm, trange
|
|
6 |
|
7 |
import torch
|
8 |
from torch import nn
|
9 |
-
import transformers
|
10 |
from transformers import AutoModel, AutoTokenizer, AutoConfig
|
11 |
|
12 |
|
@@ -237,14 +237,15 @@ def predict_location_preds(tokenizer, model, feature_text, pn_history):
|
|
237 |
|
238 |
def get_predictions(feature_text, pn_history):
|
239 |
location_preds, pred_string = predict_location_preds(tokenizer, model, [feature_text], [pn_history])
|
240 |
-
print(pred_string)
|
|
|
241 |
|
242 |
tokenizer = AutoTokenizer.from_pretrained(config['tokenizer_path'])
|
243 |
-
path = '
|
244 |
|
245 |
model = NBMEModel().to(config['device'])
|
246 |
model.load_state_dict(torch.load(path, map_location=torch.device(config['device']))['model'])
|
247 |
-
model.eval()
|
248 |
|
249 |
# input_text = create_sample_test()
|
250 |
# feature_text = input_text.feature_text[0]
|
|
|
6 |
|
7 |
import torch
|
8 |
from torch import nn
|
9 |
+
# import transformers
|
10 |
from transformers import AutoModel, AutoTokenizer, AutoConfig
|
11 |
|
12 |
|
|
|
237 |
|
def get_predictions(feature_text, pn_history):
    """Return the predicted annotation string for one (feature, note) pair.

    Wraps both arguments in single-element lists, since predict_location_preds
    operates on batches; the character-location predictions are discarded and
    only the human-readable prediction string is returned.
    """
    _, pred_string = predict_location_preds(tokenizer, model, [feature_text], [pn_history])
    return pred_string
|
242 |
|
243 |
tokenizer = AutoTokenizer.from_pretrained(config['tokenizer_path'])
|
244 |
+
path = 'model.pth'
|
245 |
|
246 |
model = NBMEModel().to(config['device'])
|
247 |
model.load_state_dict(torch.load(path, map_location=torch.device(config['device']))['model'])
|
248 |
+
model.eval()
|
249 |
|
250 |
# input_text = create_sample_test()
|
251 |
# feature_text = input_text.feature_text[0]
|
requirements.txt
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# NOTE(review): "gc" removed — it is a Python standard-library module, not a
# pip-installable package; `pip install -r requirements.txt` fails on it.
|
2 |
+
numpy
|
3 |
+
pandas
|
4 |
+
--find-links https://download.pytorch.org/whl/torch_stable.html
torch==1.9.1+cpu
|
5 |
+
transformers==4.12.5
|
6 |
+
tqdm
|