Kevin Fink committed on
Commit
dbcda41
·
1 Parent(s): f12c203
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -78,9 +78,9 @@ def fine_tune_model(model, dataset_name, hub_id, api_key, num_epochs, batch_size
78
  weight_decay=0.01,
79
  #gradient_accumulation_steps=int(grad),
80
  #max_grad_norm = 3.0,
81
- load_best_model_at_end=True,
82
- metric_for_best_model="loss",
83
- greater_is_better=True,
84
  logging_dir='/data/logs',
85
  logging_steps=200,
86
  #push_to_hub=True,
@@ -160,7 +160,7 @@ def fine_tune_model(model, dataset_name, hub_id, api_key, num_epochs, batch_size
160
  del dataset['validation']
161
  test_set = dataset.map(tokenize_function, batched=True)
162
  test_set['test'].save_to_disk(f'/data/{hub_id.strip()}_test_dataset')
163
- return 'TRAINING DONE'
164
 
165
  elif os.access(f'/data/{hub_id.strip()}_validation_dataset', os.R_OK):
166
  dataset = load_dataset(dataset_name.strip())
 
78
  weight_decay=0.01,
79
  #gradient_accumulation_steps=int(grad),
80
  #max_grad_norm = 3.0,
81
+ #load_best_model_at_end=True,
82
+ #metric_for_best_model="loss",
83
+ #greater_is_better=True,
84
  logging_dir='/data/logs',
85
  logging_steps=200,
86
  #push_to_hub=True,
 
160
  del dataset['validation']
161
  test_set = dataset.map(tokenize_function, batched=True)
162
  test_set['test'].save_to_disk(f'/data/{hub_id.strip()}_test_dataset')
163
+ return 'TOKENS DONE'
164
 
165
  elif os.access(f'/data/{hub_id.strip()}_validation_dataset', os.R_OK):
166
  dataset = load_dataset(dataset_name.strip())