joechou committed on
Commit
befa598
1 Parent(s): 5e6557c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -25
app.py CHANGED
@@ -1,4 +1,4 @@
1
- '''
2
  import argparse
3
  import random
4
  import numpy as np
@@ -14,13 +14,6 @@ from transformers.models.bert.tokenization_bert import BertTokenizer
14
  import sentencepiece as spm
15
 
16
 
17
- logging.getLogger("lightning").setLevel(logging.ERROR)
18
- def fxn():
19
- warnings.warn("deprecated", DeprecationWarning)
20
-
21
- with warnings.catch_warnings():
22
- warnings.simplefilter("ignore")
23
- fxn()
24
 
25
 
26
 
@@ -33,7 +26,7 @@ add_args(args)
33
 
34
 
35
 
36
- #model = KobeModel(args)
37
 
38
 
39
 
@@ -54,26 +47,14 @@ cond_tokenizer.Load(args.cond_vocab_file)
54
 
55
 
56
 
57
-
58
-
59
-
60
  #model = model.load_from_checkpoint("/root/kobe-v2/1ja19m5t/checkpoints/epoch=19-step=66080.ckpt", args=args)
61
  #model = model.load_from_checkpoint("/root/kobe-v2/37ht1cvz/checkpoints/epoch=11-step=396384.ckpt", args=args)
62
 
63
- trainer = pl.Trainer(accelerator='gpu', devices=1, max_epochs=-1)
64
- '''
65
-
66
-
67
- import streamlit as st
68
-
69
- st.write("Most appearing words including stopwords")
70
 
71
 
72
- choice = st.selectbox(
73
 
74
- 'Select the items you want?',
75
-
76
- ('Pen','Pencil','Eraser','Sharpener','Notebook'))
77
 
78
 
79
  input1 = st.selectbox(
@@ -179,7 +160,7 @@ cond = aspect+" "+cond
179
  #print(fact)
180
  #print(cond)
181
 
182
- '''
183
  tokenizer = text_tokenizer
184
  title_token_ids=tokenizer.encode(title, add_special_tokens=False)
185
  condition_token_ids=cond_tokenizer.EncodeAsIds(cond)
@@ -199,4 +180,3 @@ dm = KobeDataModule(
199
  for d in dm.test_dataloader():
200
  st.write("result:")
201
  st.write(''.join(model.test_step(d ,1)).replace(" ",""))
202
- '''
 
1
+
2
  import argparse
3
  import random
4
  import numpy as np
 
14
  import sentencepiece as spm
15
 
16
 
 
 
 
 
 
 
 
17
 
18
 
19
 
 
26
 
27
 
28
 
29
+ model = KobeModel(args)
30
 
31
 
32
 
 
47
 
48
 
49
 
 
 
 
50
  #model = model.load_from_checkpoint("/root/kobe-v2/1ja19m5t/checkpoints/epoch=19-step=66080.ckpt", args=args)
51
  #model = model.load_from_checkpoint("/root/kobe-v2/37ht1cvz/checkpoints/epoch=11-step=396384.ckpt", args=args)
52
 
53
+ trainer = pl.Trainer()
 
 
 
 
 
 
54
 
55
 
 
56
 
57
+ import streamlit as st
 
 
58
 
59
 
60
  input1 = st.selectbox(
 
160
  #print(fact)
161
  #print(cond)
162
 
163
+
164
  tokenizer = text_tokenizer
165
  title_token_ids=tokenizer.encode(title, add_special_tokens=False)
166
  condition_token_ids=cond_tokenizer.EncodeAsIds(cond)
 
180
  for d in dm.test_dataloader():
181
  st.write("result:")
182
  st.write(''.join(model.test_step(d ,1)).replace(" ",""))