model_kwargs are not used by model

#6
by a749734 - opened

Traceback (most recent call last):
File "/home/cloud-user/testing/fine_tune.py", line 78, in
tokens = model.generate(**inputs, generation_config=generation_config).to(device)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/cloud-user/anaconda3/envs/fid_env/lib/python3.11/site-packages/torch/utils/_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "/home/cloud-user/anaconda3/envs/fid_env/lib/python3.11/site-packages/transformers/generation/utils.py", line 1267, in generate
self._validate_model_kwargs(model_kwargs.copy())
File "/home/cloud-user/anaconda3/envs/fid_env/lib/python3.11/site-packages/transformers/generation/utils.py", line 1140, in _validate_model_kwargs
raise ValueError(
ValueError: The following model_kwargs are not used by the model: ['token_type_ids'] (note: typos in the generate arguments will also show up in this list)

I'm using this code:

# Decoding settings.
# NOTE(review): temperature/repetition_penalty only take effect when
# do_sample=True (currently commented out) — with the default greedy search
# the temperature value is silently ignored.
generation_config = GenerationConfig(
    # max_length counts the PROMPT tokens too; with max_length=21 a prompt of
    # 21+ tokens leaves no room for generation. Prefer max_new_tokens to cap
    # only the generated continuation.
    temperature=0.4,
    repetition_penalty=1.3,
    # do_sample=True,
    # early_stopping=False,
    min_length=21,
    max_length=21,
)

# Fix for the reported error: this tokenizer emits `token_type_ids`, which the
# model's forward()/generate() does not accept, raising
# "ValueError: The following model_kwargs are not used by the model:
# ['token_type_ids']". Asking the tokenizer not to return them resolves it.
# (Equivalent fix: inputs.pop("token_type_ids", None) before generate().)
inputs = tokenizer(
    prompt,
    padding=False,
    add_special_tokens=False,
    return_token_type_ids=False,
    return_tensors="pt",
).to(device)

with torch.inference_mode():
    # generate() returns token ids on the model's device already; .to(device)
    # is kept from the original as a no-op safeguard.
    tokens = model.generate(**inputs, generation_config=generation_config).to(device)

completion = tokenizer.decode(tokens[0], skip_special_tokens=True)

Sign up or log in to comment