Update app.py
app.py
CHANGED
@@ -12,13 +12,17 @@ examples = [
     ["GSHMSDNEDNFDGDDFDDVEEDEGLDDLENAEEEGQENVEILPSGERPQANQKRITTPYMTKYERARVLGTRALQIAMCAPVMVELEGETDPLLIAMKELKARKIPIIIRRYLPDGSYEDWGVDELIITD"]]
 
 def get_out(sent):
+    prefix = ""
+    if len(sent)>1026:
+        sent = sent[:1026]
+        prefix = "Your protein was longer than 1026 AAs. We are working on including longer sequences but in the meantime, here are the scores for the first 1026 AAs: "
     encoded = tokenizer.encode_plus(sent, return_tensors="pt")
     with torch.no_grad():
         output = model(**encoded)
     output = F.softmax(torch.squeeze(output['logits']))[1:-1,1].detach().numpy()
     output = np.array2string(output, precision=4, separator=',',
                              suppress_small=True)
-    return output
+    return prefix+output
 
 gr.Interface(
     get_out,
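For context: the hunk only touches get_out, while the tokenizer, model, and the remaining gr.Interface arguments live outside it. The sketch below reconstructs a plausible surrounding app.py under stated assumptions; the checkpoint name and the Gradio input/output widgets are placeholders, not taken from this commit. The 1026-character cutoff presumably matches the model's maximum input length (for example 1024 residues plus the two special tokens dropped by the [1:-1] slice), though the diff itself does not say so.

# Minimal sketch of the assumed surrounding app.py.
# MODEL_NAME and the Textbox labels below are hypothetical; only get_out,
# the examples entry, and the gr.Interface(get_out, ...) call appear in the diff.
import gradio as gr
import numpy as np
import torch
import torch.nn.functional as F
from transformers import AutoModelForTokenClassification, AutoTokenizer

MODEL_NAME = "some-org/some-protein-model"  # placeholder checkpoint name

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForTokenClassification.from_pretrained(MODEL_NAME)
model.eval()

examples = [
    ["GSHMSDNEDNFDGDDFDDVEEDEGLDDLENAEEEGQENVEILPSGERPQANQKRITTPYMTKYERARVLGTRALQIAMCAPVMVELEGETDPLLIAMKELKARKIPIIIRRYLPDGSYEDWGVDELIITD"]]

def get_out(sent):
    prefix = ""
    if len(sent) > 1026:
        # Truncate over-long sequences and warn the user, as in the commit.
        sent = sent[:1026]
        prefix = ("Your protein was longer than 1026 AAs. We are working on "
                  "including longer sequences but in the meantime, here are "
                  "the scores for the first 1026 AAs: ")
    encoded = tokenizer.encode_plus(sent, return_tensors="pt")
    with torch.no_grad():
        output = model(**encoded)
    # Per-residue probability of class 1; [1:-1] drops the special tokens at
    # both ends. dim=-1 is made explicit here (the diff relies on the default).
    scores = F.softmax(torch.squeeze(output["logits"]), dim=-1)[1:-1, 1]
    scores = scores.detach().numpy()
    scores = np.array2string(scores, precision=4, separator=",",
                             suppress_small=True)
    return prefix + scores

gr.Interface(
    get_out,
    inputs=gr.Textbox(label="Protein sequence"),      # assumed widget
    outputs=gr.Textbox(label="Per-residue scores"),   # assumed widget
    examples=examples,
).launch()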