supermy committed on
Commit 17b3b4b
1 Parent(s): 0a02563

Update app.py

Files changed (1)
  1. app.py +3 -4
app.py CHANGED
@@ -1,4 +1,3 @@
-
 import torch
 
 import gradio as gr
@@ -6,7 +5,7 @@ import torch.nn.functional as F
 
 from transformers import BertTokenizer, GPT2LMHeadModel,PreTrainedTokenizerFast
 # tokenizer = BertTokenizer.from_pretrained("supermy/poetry")
-tokenizer = BertTokenizer(vocab_file="poetry-bpe.json")
+tokenizer = PreTrainedTokenizerFast(vocab_file="poetry-bpe.json")
 model = GPT2LMHeadModel.from_pretrained("supermy/poetry")
 model.eval()
 
@@ -27,13 +26,13 @@ def top_k_top_p_filtering( logits, top_k=0, top_p=0.0, filter_value=-float('Inf'
     return logits
 
 def generate(title, context, max_len):
-    print(title, context, max_len)
+
     # input_ids.extend( tokenizer.encode(input_text + "-", add_special_tokens=False) )
 
     title_ids = tokenizer.encode(title, add_special_tokens=False)
     context_ids = tokenizer.encode(context, add_special_tokens=False)
     print(title_ids,context_ids)
-
+
     input_ids = title_ids + [sep_id] + context_ids
 
     cur_len = len(input_ids)
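
The substantive change is the tokenizer: app.py now builds a PreTrainedTokenizerFast from the BPE JSON instead of a BertTokenizer. Below is a minimal sketch of that setup, assuming poetry-bpe.json is a tokenizers-library JSON file (the documented keyword for such a file is tokenizer_file, whereas the commit passes vocab_file) and treating the [SEP] token and the sample strings as illustrative placeholders rather than values taken from the rest of app.py.

from transformers import PreTrainedTokenizerFast, GPT2LMHeadModel

# Load the fast tokenizer from the BPE JSON file; tokenizer_file is the
# documented argument for a tokenizers-library JSON (assumption: the file
# referenced in the commit is such a JSON).
tokenizer = PreTrainedTokenizerFast(tokenizer_file="poetry-bpe.json")

model = GPT2LMHeadModel.from_pretrained("supermy/poetry")
model.eval()

# Build the generate() input the same way app.py does: title ids, then a
# separator id, then the opening-line ids. "[SEP]" is a placeholder
# assumption for however sep_id is defined elsewhere in app.py.
sep_id = tokenizer.convert_tokens_to_ids("[SEP]")
title_ids = tokenizer.encode("咏梅", add_special_tokens=False)
context_ids = tokenizer.encode("雪后", add_special_tokens=False)
input_ids = title_ids + [sep_id] + context_ids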