Kevin Fink committed on
Commit
b755227
·
1 Parent(s): d767c85
Files changed (1) hide show
  1. app.py +5 -3
app.py CHANGED
@@ -109,7 +109,8 @@ def fine_tune_model(model, dataset_name, hub_id, api_key, num_epochs, batch_size
109
  #max_length=max_length, # Set to None for dynamic padding
110
  #truncation=True,
111
  #padding='max_length',
112
- #return_tensors='pt',
 
113
  )
114
 
115
  # Setup the decoder input IDs (shifted right)
@@ -118,8 +119,9 @@ def fine_tune_model(model, dataset_name, hub_id, api_key, num_epochs, batch_size
118
  #max_length=128, # Set to None for dynamic padding
119
  #truncation=True,
120
  #padding='max_length',
121
- ##text_target=examples['target'],
122
- #return_tensors='pt',
 
123
  )
124
  #labels["input_ids"] = [
125
  # [(l if l != tokenizer.pad_token_id else -100) for l in label] for label in labels["input_ids"]
 
109
  #max_length=max_length, # Set to None for dynamic padding
110
  #truncation=True,
111
  #padding='max_length',
112
+ #return_tensors='pt',
113
+ padding=True,
114
  )
115
 
116
  # Setup the decoder input IDs (shifted right)
 
119
  #max_length=128, # Set to None for dynamic padding
120
  #truncation=True,
121
  #padding='max_length',
122
+ #text_target=examples['target'],
123
+ #return_tensors='pt',
124
+ padding=True,
125
  )
126
  #labels["input_ids"] = [
127
  # [(l if l != tokenizer.pad_token_id else -100) for l in label] for label in labels["input_ids"]