paragon-analytics committed • commit 1abbe27 • parent 6981fa0
Update app.py

app.py CHANGED
@@ -1,6 +1,8 @@
 import streamlit as st
 import gradio as gr
 import shap
+import numpy as np
+import scipy as sp
 import torch
 import tensorflow as tf
 import transformers
@@ -12,11 +14,21 @@ from transformers_interpret import SequenceClassificationExplainer
 
 tokenizer = AutoTokenizer.from_pretrained("paragon-analytics/ADRv1")
 model = AutoModelForSequenceClassification.from_pretrained("paragon-analytics/ADRv1")
+modelc = AutoModelForSequenceClassification.from_pretrained("paragon-analytics/ADRv1").cuda
+
 
 cls_explainer = SequenceClassificationExplainer(
     model,
     tokenizer)
 
+# define a prediction function
+def f(x):
+    tv = torch.tensor([tokenizer.encode(v, padding='max_length', max_length=500, truncation=True) for v in x]).cuda()
+    outputs = modelc(tv)[0].detach().cpu().numpy()
+    scores = (np.exp(outputs).T / np.exp(outputs).sum(-1)).T
+    val = sp.special.logit(scores[:,1]) # use one vs rest logit units
+    return val
+
 def adr_predict(x):
     encoded_input = tokenizer(x, return_tensors='pt')
     output = model(**encoded_input)
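The new f function is the prediction wrapper that SHAP calls (it is wired to an explainer in the next hunk): it tokenizes a batch of strings, runs the classifier, applies a softmax, and returns one-vs-rest logit units for the positive class. As committed, modelc is assigned model.cuda without the call parentheses, so it holds the bound method rather than a model moved to the GPU, and the wrapper assumes CUDA hardware either way. A minimal CPU-only sketch of the same pattern; the helper name predict_logits and the eval/no_grad setup are added for illustration and are not part of the commit:

import numpy as np
import torch
from scipy.special import logit
from transformers import AutoTokenizer, AutoModelForSequenceClassification

tokenizer = AutoTokenizer.from_pretrained("paragon-analytics/ADRv1")
model = AutoModelForSequenceClassification.from_pretrained("paragon-analytics/ADRv1")
model.eval()

def predict_logits(texts):
    # Tokenize a batch of strings into a padded tensor of input ids.
    ids = torch.tensor([
        tokenizer.encode(t, padding="max_length", max_length=500, truncation=True)
        for t in texts
    ])
    with torch.no_grad():
        outputs = model(ids).logits.numpy()
    # Softmax over the two classes, then one-vs-rest logit of the positive class.
    probs = np.exp(outputs) / np.exp(outputs).sum(-1, keepdims=True)
    return logit(probs[:, 1])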
@@ -50,13 +62,20 @@ def adr_predict(x):
 
     word_attributions = [(letter[i], score[i]) for i in range(0, len(letter))]
 
+    # SHAP:
+    # build an explainer using a token masker
+    explainer = shap.Explainer(f, tokenizer)
+    shap_values = explainer(str(x), fixed_context=1)
+    # plot the first sentence's explanation
+    plt = shap.plots.text(shap_values[0],display=False)
 
-    return {"Severe Reaction": float(scores.numpy()[1]), "Non-severe Reaction": float(scores.numpy()[0])}, word_attributions
+
+    return {"Severe Reaction": float(scores.numpy()[1]), "Non-severe Reaction": float(scores.numpy()[0])}, word_attributions,plt
 
 def main(text):
     text = str(text).lower()
     obj = adr_predict(text)
-    return obj[0],obj[1]
+    return obj[0],obj[1],obj[2]
 
 title = "Welcome to **ADR Detector** 🪐"
 description1 = """
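This hunk builds a shap.Explainer from the prediction function and the tokenizer, which doubles as the text masker, and renders the result with shap.plots.text(..., display=False), so the plot comes back as an HTML string instead of being displayed; that string becomes the third return value of adr_predict. A short sketch of the flow, reusing the hypothetical predict_logits helper from the sketch above:

import shap

# The tokenizer serves as the masker that hides tokens during attribution.
explainer = shap.Explainer(predict_logits, tokenizer)

# Explain one sentence; fixed_context=1 mirrors the setting used in the commit.
shap_values = explainer(["I have severe pain."], fixed_context=1)

# With display=False the text plot is returned as an HTML string
# that can be embedded in a web page rather than shown in a notebook.
html = shap.plots.text(shap_values[0], display=False)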
@@ -82,14 +101,16 @@ with gr.Blocks(title=title) as demo:
                                           "--": "darkblue",
                                           "-": "blue", "NA":"white"})
 
+    shap = gr.HighlightedText(label="SHAP Scores",combine_adjacent=False)
+
 
     submit_btn.click(
         main,
         [text],
-        [label,intp], api_name="adr"
+        [label,intp,shap], api_name="adr"
     )
 
     gr.Markdown("### Click on any of the examples below to see to what extent they contain resilience messaging:")
-    gr.Examples([["I have minor pain."],["I have severe pain."]], [text], [label,intp], main, cache_examples=True)
+    gr.Examples([["I have minor pain."],["I have severe pain."]], [text], [label,intp,shap], main, cache_examples=True)
 
 demo.launch()
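On the interface side, the commit adds a third output component and passes it to submit_btn.click and gr.Examples. Two details stand out: the component is bound to the name shap, which rebinds the imported shap module at module scope, so the shap.Explainer call inside adr_predict would then resolve to the Gradio component rather than the library; and gr.HighlightedText expects token/score pairs, while shap.plots.text(..., display=False) yields raw HTML, for which gr.HTML is the usual component. A minimal sketch of the wiring under those assumptions; the Textbox/Button construction, the stub main, and the name shap_html are illustrative, since the diff does not show how the other components are created:

import gradio as gr

def main(text):
    # Stub standing in for the app's real main(); returns the three outputs.
    return ({"Severe Reaction": 0.7, "Non-severe Reaction": 0.3},
            [("severe", 0.9), ("pain", 0.4)],
            "<div>SHAP text plot HTML goes here</div>")

with gr.Blocks(title="ADR Detector") as demo:
    text = gr.Textbox(label="Describe the adverse reaction")
    submit_btn = gr.Button("Analyze")

    label = gr.Label(label="Severity")
    intp = gr.HighlightedText(label="Word Scores", combine_adjacent=False)
    shap_html = gr.HTML(label="SHAP Scores")  # avoids shadowing the shap module

    submit_btn.click(main, [text], [label, intp, shap_html], api_name="adr")

    gr.Examples([["I have minor pain."], ["I have severe pain."]],
                [text], [label, intp, shap_html], main, cache_examples=True)

demo.launch()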
|