joechou committed on
Commit 9603d95 · 1 Parent(s): 8caa49d

Update app.py

Files changed (1): app.py (+8 -8)
app.py CHANGED
@@ -165,22 +165,22 @@ if input3==3:
     #print(cond)


-    tokenizer = text_tokenizer
-    title_token_ids=tokenizer.encode(st.session_state.title, add_special_tokens=False)
-    condition_token_ids=cond_tokenizer.EncodeAsIds(st.session_state.aspect+" "+st.session_state.cat)
-    fact_token_ids=tokenizer.encode(st.session_state.fact, add_special_tokens=False)
-
-    e = Example(title_token_ids, condition_token_ids, fact_token_ids)

+    if st.button('result'):

-    dm = KobeDataModule(
+        tokenizer = text_tokenizer
+        title_token_ids=tokenizer.encode(st.session_state.title, add_special_tokens=False)
+        condition_token_ids=cond_tokenizer.EncodeAsIds(st.session_state.aspect+" "+st.session_state.cat)
+        fact_token_ids=tokenizer.encode(st.session_state.fact, add_special_tokens=False)
+        e = Example(title_token_ids, condition_token_ids, fact_token_ids)
+
+        dm = KobeDataModule(
         [e],
         args.text_vocab_path,
         args.max_seq_len,
         1,
         1,
     )
-    if st.button('result'):

         for d in dm.test_dataloader():
             st.write(st.session_state.title)
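
In short, the commit moves the tokenization and KobeDataModule construction inside the if st.button('result'): branch, so that work runs only after the user clicks the button instead of on every Streamlit rerun. Below is a minimal, self-contained sketch of that pattern; the seeded title value and the placeholder comments are illustrative assumptions, not the app's real inputs or API.

import streamlit as st

# Seed a default so this sketch runs on its own; in app.py the title,
# aspect, category, and fact come from earlier input widgets.
if "title" not in st.session_state:
    st.session_state["title"] = "example product title"

if st.button("result"):
    # After this commit, tokenization, Example construction, and the
    # KobeDataModule setup all live inside this branch, so they execute
    # only on the rerun triggered by clicking the button.
    st.write(st.session_state["title"])

st.button returns True only for the rerun caused by the click, so the expensive setup is skipped on every other rerun of the script.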