checkpoint update
model.py CHANGED
@@ -103,6 +103,7 @@ Output:'''
         out = self.llm_model.generate(
             inputs_embeds=combined_embeds,
             max_new_tokens=max_new_tokens,
+            pad_token_id=self.llm_tokenizer.pad_token_id
         ).cpu().tolist()[0]

         output_text = self.llm_tokenizer.decode(out, skip_special_tokens=True)
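For context, a minimal sketch (not this repository's code) of what the added pad_token_id argument does. The model name "gpt2" and the prompt are placeholders, and the sketch uses input_ids rather than the inputs_embeds=combined_embeds path from the diff; the point is only that when a tokenizer defines no pad token, generate() falls back to eos_token_id and logs a warning on each call, so passing pad_token_id explicitly keeps the padding behavior deterministic and quiet.

# Minimal sketch, assuming a standard Hugging Face transformers setup.
# "gpt2" is a placeholder for the checkpoint's LLM; the real code passes
# inputs_embeds=combined_embeds instead of tokenized input_ids.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

# GPT-2 ships without a pad token; reuse the EOS token so pad_token_id is defined.
if tokenizer.pad_token_id is None:
    tokenizer.pad_token = tokenizer.eos_token

inputs = tokenizer("Output:", return_tensors="pt")
out = model.generate(
    **inputs,
    max_new_tokens=20,
    pad_token_id=tokenizer.pad_token_id,  # the argument added in this commit
).cpu().tolist()[0]
print(tokenizer.decode(out, skip_special_tokens=True))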