# evaluate the base gpt2
# n_layer=12, n_head=12, n_embd=768
# 124M parameters
batch_size = 8
eval_iters = 500 # use more iterations to get a good estimate of the loss
eval_only = True
wandb_log = False
init_from = 'gpt2'
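
# Usage note (an assumption, not part of the original file): this reads like a
# nanoGPT-style config override that the training script exec()s on top of its
# module-level defaults, e.g.:
#   python train.py config/eval_gpt2.py
# With eval_only=True the run would estimate train/val loss for the pretrained
# GPT-2 checkpoint and stop without performing any training updates.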