\n",
"[2022-12-16 13:29:35,627] [INFO] [config.py:1024:print] nebula_config ................ {\n",
" \"enabled\": false, \n",
" \"persistent_storage_path\": null, \n",
" \"persistent_time_interval\": 100, \n",
" \"num_of_version_in_retention\": 2, \n",
" \"enable_nebula_load\": true, \n",
" \"load_path\": null\n",
"}\n",
"[2022-12-16 13:29:35,627] [INFO] [config.py:1024:print] optimizer_legacy_fusion ...... False\n",
"[2022-12-16 13:29:35,627] [INFO] [config.py:1024:print] optimizer_name ............... adamw\n",
"[2022-12-16 13:29:35,628] [INFO] [config.py:1024:print] optimizer_params ............. {'lr': 1e-05, 'betas': [0.9, 0.999], 'eps': 1e-08, 'weight_decay': 0.0}\n",
"[2022-12-16 13:29:35,628] [INFO] [config.py:1024:print] pipeline ..................... {'stages': 'auto', 'partition': 'best', 'seed_layers': False, 'activation_checkpoint_interval': 0}\n",
"[2022-12-16 13:29:35,628] [INFO] [config.py:1024:print] pld_enabled .................. False\n",
"[2022-12-16 13:29:35,629] [INFO] [config.py:1024:print] pld_params ................... False\n",
"[2022-12-16 13:29:35,629] [INFO] [config.py:1024:print] prescale_gradients ........... False\n",
"[2022-12-16 13:29:35,630] [INFO] [config.py:1024:print] scheduler_name ............... WarmupLR\n",
"[2022-12-16 13:29:35,630] [INFO] [config.py:1024:print] scheduler_params ............. {'warmup_min_lr': 0, 'warmup_max_lr': 1e-05, 'warmup_num_steps': 100}\n",
"[2022-12-16 13:29:35,630] [INFO] [config.py:1024:print] sparse_attention ............. None\n",
"[2022-12-16 13:29:35,631] [INFO] [config.py:1024:print] sparse_gradients_enabled ..... False\n",
"[2022-12-16 13:29:35,631] [INFO] [config.py:1024:print] steps_per_print .............. 10\n",
"[2022-12-16 13:29:35,631] [INFO] [config.py:1024:print] train_batch_size ............. 64\n",
"[2022-12-16 13:29:35,632] [INFO] [config.py:1024:print] train_micro_batch_size_per_gpu 32\n",
"[2022-12-16 13:29:35,632] [INFO] [config.py:1024:print] use_node_local_storage ....... False\n",
"[2022-12-16 13:29:35,633] [INFO] [config.py:1024:print] wall_clock_breakdown ......... False\n",
"[2022-12-16 13:29:35,633] [INFO] [config.py:1024:print] world_size ................... 1\n",
"[2022-12-16 13:29:35,633] [INFO] [config.py:1024:print] zero_allow_untested_optimizer False\n",
"[2022-12-16 13:29:35,634] [INFO] [config.py:1024:print] zero_config .................. stage=2 contiguous_gradients=True reduce_scatter=True reduce_bucket_size=200000000 allgather_partitions=True allgather_bucket_size=200000000 overlap_comm=True load_from_fp32_weights=True elastic_checkpoint=False offload_param=None offload_optimizer=DeepSpeedZeroOffloadOptimizerConfig(device='cpu', nvme_path=None, buffer_count=4, pin_memory=True, pipeline=False, pipeline_read=False, pipeline_write=False, fast_init=False) sub_group_size=1,000,000,000 cpu_offload_param=None cpu_offload_use_pin_memory=None cpu_offload=None prefetch_bucket_size=50,000,000 param_persistence_threshold=100,000 model_persistence_threshold=sys.maxsize max_live_parameters=1,000,000,000 max_reuse_distance=1,000,000,000 gather_16bit_weights_on_model_save=False stage3_gather_fp16_weights_on_model_save=False ignore_unused_parameters=True legacy_stage1=False round_robin_gradients=False\n",
"[2022-12-16 13:29:35,634] [INFO] [config.py:1024:print] zero_enabled ................. True\n",
"[2022-12-16 13:29:35,635] [INFO] [config.py:1024:print] zero_optimization_stage ...... 2\n",
"[2022-12-16 13:29:35,635] [INFO] [config.py:1009:print_user_config] json = {\n",
" \"fp16\": {\n",
" \"enabled\": true, \n",
" \"loss_scale\": 0, \n",
" \"loss_scale_window\": 1000, \n",
" \"initial_scale_power\": 16, \n",
" \"hysteresis\": 2, \n",
" \"min_loss_scale\": 1\n",
" }, \n",
" \"optimizer\": {\n",
" \"type\": \"AdamW\", \n",
" \"params\": {\n",
" \"lr\": 1e-05, \n",
" \"betas\": [0.9, 0.999], \n",
" \"eps\": 1e-08, \n",
" \"weight_decay\": 0.0\n",
" }\n",
" }, \n",
" \"scheduler\": {\n",
" \"type\": \"WarmupLR\", \n",
" \"params\": {\n",
" \"warmup_min_lr\": 0, \n",
" \"warmup_max_lr\": 1e-05, \n",
" \"warmup_num_steps\": 100\n",
" }\n",
" }, \n",
" \"zero_optimization\": {\n",
" \"stage\": 2, \n",
" \"offload_optimizer\": {\n",
" \"device\": \"cpu\", \n",
" \"pin_memory\": true\n",
" }, \n",
" \"allgather_partitions\": true, \n",
" \"allgather_bucket_size\": 2.000000e+08, \n",
" \"overlap_comm\": true, \n",
" \"reduce_scatter\": true, \n",
" \"reduce_bucket_size\": 2.000000e+08, \n",
" \"contiguous_gradients\": true\n",
" }, \n",
" \"gradient_accumulation_steps\": 2, \n",
" \"gradient_clipping\": 1.0, \n",
" \"train_batch_size\": 64, \n",
" \"train_micro_batch_size_per_gpu\": 32\n",
"}\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"Using /home/ubuntu/.cache/torch_extensions/py38_cu117 as PyTorch extensions root...\n",
"No modifications detected for re-loaded extension module utils, skipping build step...\n",
"Loading extension module utils...\n",
"***** Running training *****\n",
" Num examples = 64000\n",
" Num Epochs = 9223372036854775807\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Time to load utils op: 0.005568742752075195 seconds\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" Instantaneous batch size per device = 32\n",
" Total train batch size (w. parallel, distributed & accumulation) = 64\n",
" Gradient Accumulation steps = 2\n",
" Total optimization steps = 1000\n",
" Number of trainable parameters = 1543304960\n",
"Reading metadata...: 2165it [00:00, 64424.75it/s]\n",
"The following columns in the training set don't have a corresponding argument in `WhisperForConditionalGeneration.forward` and have been ignored: sentence, audio, input_length. If sentence, audio, input_length are not expected by `WhisperForConditionalGeneration.forward`, you can safely ignore this message.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"[2022-12-16 13:30:12,371] [INFO] [stage_1_and_2.py:1765:step] [deepspeed] OVERFLOW! Rank 0 Skipping step. Attempted loss scale: 65536, reducing to 65536\n"
]
},
{
"data": {
"text/html": [
"\n",
" \n",
" \n",
"
\n",
" [ 101/1000 24:25 < 3:41:52, 0.07 it/s, Epoch 0.10/9223372036854775807]\n",
"
\n",
" \n",
" \n",
" \n",
" Step | \n",
" Training Loss | \n",
" Validation Loss | \n",
"
\n",
" \n",
" \n",
" \n",
"
"
],
"text/plain": [
""
]
},
"metadata": {},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"[2022-12-16 13:30:26,874] [INFO] [stage_1_and_2.py:1765:step] [deepspeed] OVERFLOW! Rank 0 Skipping step. Attempted loss scale: 65536, reducing to 32768.0\n",
"[2022-12-16 13:30:26,878] [INFO] [timer.py:197:stop] 0/4, RunningAvgSamplesPerSec=6.359143267427371, CurrSamplesPerSec=6.05562698727593, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:30:41,688] [INFO] [stage_1_and_2.py:1765:step] [deepspeed] OVERFLOW! Rank 0 Skipping step. Attempted loss scale: 32768.0, reducing to 16384.0\n",
"[2022-12-16 13:30:41,692] [INFO] [timer.py:197:stop] 0/6, RunningAvgSamplesPerSec=6.442681205426282, CurrSamplesPerSec=6.185670974618687, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:30:56,690] [INFO] [timer.py:197:stop] 0/8, RunningAvgSamplesPerSec=6.250138482974161, CurrSamplesPerSec=5.239387620689085, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:31:11,390] [INFO] [timer.py:197:stop] 0/10, RunningAvgSamplesPerSec=6.199217766209486, CurrSamplesPerSec=5.469275533253204, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:31:25,760] [INFO] [timer.py:197:stop] 0/12, RunningAvgSamplesPerSec=6.187350055236473, CurrSamplesPerSec=5.465893137251788, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:31:40,116] [INFO] [timer.py:197:stop] 0/14, RunningAvgSamplesPerSec=6.182433423403349, CurrSamplesPerSec=5.578369458479616, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:31:54,845] [INFO] [timer.py:197:stop] 0/16, RunningAvgSamplesPerSec=6.168209488109452, CurrSamplesPerSec=5.481925126875305, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:32:09,837] [INFO] [timer.py:197:stop] 0/18, RunningAvgSamplesPerSec=6.166182771011467, CurrSamplesPerSec=5.576634147245411, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:32:24,514] [INFO] [logging.py:68:log_dist] [Rank 0] step=10, skipped=3, lr=[4.225490200071284e-06], mom=[[0.9, 0.999]]\n",
"[2022-12-16 13:32:24,516] [INFO] [timer.py:197:stop] 0/20, RunningAvgSamplesPerSec=6.14923830606697, CurrSamplesPerSec=5.4862895214102165, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:32:39,349] [INFO] [timer.py:197:stop] 0/22, RunningAvgSamplesPerSec=6.1362626639738735, CurrSamplesPerSec=5.400516433628815, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:32:54,284] [INFO] [timer.py:197:stop] 0/24, RunningAvgSamplesPerSec=6.116244939022191, CurrSamplesPerSec=5.331918879729864, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:33:09,152] [INFO] [timer.py:197:stop] 0/26, RunningAvgSamplesPerSec=6.111198466738003, CurrSamplesPerSec=5.46965978645552, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:33:24,156] [INFO] [timer.py:197:stop] 0/28, RunningAvgSamplesPerSec=6.1089592602745295, CurrSamplesPerSec=5.493128255256428, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:33:39,020] [INFO] [timer.py:197:stop] 0/30, RunningAvgSamplesPerSec=6.107566100664851, CurrSamplesPerSec=5.454037094012864, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:33:53,833] [INFO] [timer.py:197:stop] 0/32, RunningAvgSamplesPerSec=6.104934863126791, CurrSamplesPerSec=5.402098409851611, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:34:08,846] [INFO] [timer.py:197:stop] 0/34, RunningAvgSamplesPerSec=6.106360932585499, CurrSamplesPerSec=5.522979183752241, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:34:23,904] [INFO] [timer.py:197:stop] 0/36, RunningAvgSamplesPerSec=6.1056306050274625, CurrSamplesPerSec=5.534223579655121, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:34:38,360] [INFO] [timer.py:197:stop] 0/38, RunningAvgSamplesPerSec=6.107025499795947, CurrSamplesPerSec=5.517999025063001, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:34:52,987] [INFO] [logging.py:68:log_dist] [Rank 0] step=20, skipped=3, lr=[6.15224460689137e-06], mom=[[0.9, 0.999]]\n",
"[2022-12-16 13:34:52,989] [INFO] [timer.py:197:stop] 0/40, RunningAvgSamplesPerSec=6.106790407037756, CurrSamplesPerSec=5.482855146086943, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:35:07,678] [INFO] [timer.py:197:stop] 0/42, RunningAvgSamplesPerSec=6.106137329541975, CurrSamplesPerSec=5.49275126263391, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:35:22,387] [INFO] [timer.py:197:stop] 0/44, RunningAvgSamplesPerSec=6.101678041171418, CurrSamplesPerSec=5.378597132969251, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:35:36,795] [INFO] [timer.py:197:stop] 0/46, RunningAvgSamplesPerSec=6.09914148974474, CurrSamplesPerSec=5.4613528889589125, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:35:51,434] [INFO] [timer.py:197:stop] 0/48, RunningAvgSamplesPerSec=6.099051394709131, CurrSamplesPerSec=5.4884021703559025, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:36:05,840] [INFO] [timer.py:197:stop] 0/50, RunningAvgSamplesPerSec=6.100214513297745, CurrSamplesPerSec=5.47020037322054, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:36:20,385] [INFO] [timer.py:197:stop] 0/52, RunningAvgSamplesPerSec=6.098726784398205, CurrSamplesPerSec=5.433712844102512, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:36:35,297] [INFO] [timer.py:197:stop] 0/54, RunningAvgSamplesPerSec=6.0936261097499305, CurrSamplesPerSec=5.489423519383715, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:36:50,352] [INFO] [timer.py:197:stop] 0/56, RunningAvgSamplesPerSec=6.0895209772752175, CurrSamplesPerSec=5.477875509886321, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:37:05,025] [INFO] [timer.py:197:stop] 0/58, RunningAvgSamplesPerSec=6.0909453489179874, CurrSamplesPerSec=5.569184849154154, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:37:20,470] [INFO] [logging.py:68:log_dist] [Rank 0] step=30, skipped=3, lr=[7.156818820794936e-06], mom=[[0.9, 0.999]]\n",
"[2022-12-16 13:37:20,471] [INFO] [timer.py:197:stop] 0/60, RunningAvgSamplesPerSec=6.091265755090447, CurrSamplesPerSec=5.483664270842526, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:37:35,692] [INFO] [timer.py:197:stop] 0/62, RunningAvgSamplesPerSec=6.090134375608421, CurrSamplesPerSec=5.589692236464059, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:37:50,466] [INFO] [timer.py:197:stop] 0/64, RunningAvgSamplesPerSec=6.087445931775481, CurrSamplesPerSec=5.484206733168804, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:38:05,662] [INFO] [timer.py:197:stop] 0/66, RunningAvgSamplesPerSec=6.085481288319546, CurrSamplesPerSec=5.456354321207382, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:38:20,812] [INFO] [timer.py:197:stop] 0/68, RunningAvgSamplesPerSec=6.0849633855165886, CurrSamplesPerSec=5.560404510325844, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:38:35,454] [INFO] [timer.py:197:stop] 0/70, RunningAvgSamplesPerSec=6.082841357920188, CurrSamplesPerSec=5.466938858736218, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:38:50,590] [INFO] [timer.py:197:stop] 0/72, RunningAvgSamplesPerSec=6.081021514785823, CurrSamplesPerSec=5.512498905449392, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:39:05,062] [INFO] [timer.py:197:stop] 0/74, RunningAvgSamplesPerSec=6.081159270273127, CurrSamplesPerSec=5.590209079941052, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:39:19,759] [INFO] [timer.py:197:stop] 0/76, RunningAvgSamplesPerSec=6.082366611952337, CurrSamplesPerSec=5.557982892618384, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:39:34,541] [INFO] [timer.py:197:stop] 0/78, RunningAvgSamplesPerSec=6.0817232366484815, CurrSamplesPerSec=5.549053124586951, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:39:49,756] [INFO] [logging.py:68:log_dist] [Rank 0] step=40, skipped=3, lr=[7.841008620334974e-06], mom=[[0.9, 0.999]]\n",
"[2022-12-16 13:39:49,758] [INFO] [timer.py:197:stop] 0/80, RunningAvgSamplesPerSec=6.077909701875687, CurrSamplesPerSec=5.460640310175479, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:40:04,561] [INFO] [timer.py:197:stop] 0/82, RunningAvgSamplesPerSec=6.0782675124530545, CurrSamplesPerSec=5.539460325595526, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:40:19,768] [INFO] [timer.py:197:stop] 0/84, RunningAvgSamplesPerSec=6.078390170810889, CurrSamplesPerSec=5.471118389391574, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:40:34,291] [INFO] [timer.py:197:stop] 0/86, RunningAvgSamplesPerSec=6.0795061630666805, CurrSamplesPerSec=5.454458221341045, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:40:49,017] [INFO] [timer.py:197:stop] 0/88, RunningAvgSamplesPerSec=6.07951693722567, CurrSamplesPerSec=5.479403593034189, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:41:04,035] [INFO] [timer.py:197:stop] 0/90, RunningAvgSamplesPerSec=6.07713721760037, CurrSamplesPerSec=5.456287998666598, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:41:18,947] [INFO] [timer.py:197:stop] 0/92, RunningAvgSamplesPerSec=6.077188149302228, CurrSamplesPerSec=5.50184879924073, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:41:33,488] [INFO] [timer.py:197:stop] 0/94, RunningAvgSamplesPerSec=6.07680443723328, CurrSamplesPerSec=5.473631408915449, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:41:47,873] [INFO] [timer.py:197:stop] 0/96, RunningAvgSamplesPerSec=6.078162490871368, CurrSamplesPerSec=5.581634406904709, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:42:02,601] [INFO] [timer.py:197:stop] 0/98, RunningAvgSamplesPerSec=6.07918864648028, CurrSamplesPerSec=5.5189762738392805, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:42:17,393] [INFO] [logging.py:68:log_dist] [Rank 0] step=50, skipped=3, lr=[8.360489289678585e-06], mom=[[0.9, 0.999]]\n",
"[2022-12-16 13:42:17,394] [INFO] [timer.py:197:stop] 0/100, RunningAvgSamplesPerSec=6.079721917832034, CurrSamplesPerSec=5.565398772008929, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:42:32,340] [INFO] [timer.py:197:stop] 0/102, RunningAvgSamplesPerSec=6.079079748095393, CurrSamplesPerSec=5.53473523707476, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:42:46,647] [INFO] [timer.py:197:stop] 0/104, RunningAvgSamplesPerSec=6.080121170095116, CurrSamplesPerSec=5.546972622941274, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:43:01,497] [INFO] [timer.py:197:stop] 0/106, RunningAvgSamplesPerSec=6.080171891208223, CurrSamplesPerSec=5.554791525418257, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:43:16,886] [INFO] [timer.py:197:stop] 0/108, RunningAvgSamplesPerSec=6.081835087987497, CurrSamplesPerSec=5.58878961931587, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:43:31,557] [INFO] [timer.py:197:stop] 0/110, RunningAvgSamplesPerSec=6.081642070013767, CurrSamplesPerSec=5.4866510488093745, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:43:46,623] [INFO] [timer.py:197:stop] 0/112, RunningAvgSamplesPerSec=6.081178765969859, CurrSamplesPerSec=5.503815912407227, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:44:01,079] [INFO] [timer.py:197:stop] 0/114, RunningAvgSamplesPerSec=6.081720835190203, CurrSamplesPerSec=5.504593308578239, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:44:16,482] [INFO] [timer.py:197:stop] 0/116, RunningAvgSamplesPerSec=6.080214101573954, CurrSamplesPerSec=5.447973428766622, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:44:30,878] [INFO] [timer.py:197:stop] 0/118, RunningAvgSamplesPerSec=6.080946889626902, CurrSamplesPerSec=5.482487848212496, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:44:45,818] [INFO] [logging.py:68:log_dist] [Rank 0] step=60, skipped=3, lr=[8.779374278362457e-06], mom=[[0.9, 0.999]]\n",
"[2022-12-16 13:44:45,820] [INFO] [timer.py:197:stop] 0/120, RunningAvgSamplesPerSec=6.079880158785482, CurrSamplesPerSec=5.410680853516901, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:45:00,308] [INFO] [timer.py:197:stop] 0/122, RunningAvgSamplesPerSec=6.0800487826701, CurrSamplesPerSec=5.51626365654903, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:45:14,816] [INFO] [timer.py:197:stop] 0/124, RunningAvgSamplesPerSec=6.079264706998162, CurrSamplesPerSec=5.5087795351327316, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:45:29,834] [INFO] [timer.py:197:stop] 0/126, RunningAvgSamplesPerSec=6.079846025945439, CurrSamplesPerSec=5.4936159271225256, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:45:44,851] [INFO] [timer.py:197:stop] 0/128, RunningAvgSamplesPerSec=6.078175014056017, CurrSamplesPerSec=5.362789672759209, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:45:59,717] [INFO] [timer.py:197:stop] 0/130, RunningAvgSamplesPerSec=6.079084147393918, CurrSamplesPerSec=5.541067573145952, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:46:14,797] [INFO] [timer.py:197:stop] 0/132, RunningAvgSamplesPerSec=6.078861152800443, CurrSamplesPerSec=5.477529668210021, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:46:29,130] [INFO] [timer.py:197:stop] 0/134, RunningAvgSamplesPerSec=6.080307858784291, CurrSamplesPerSec=5.559784456324279, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:46:44,072] [INFO] [timer.py:197:stop] 0/136, RunningAvgSamplesPerSec=6.07930472994486, CurrSamplesPerSec=5.580544815418963, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:46:58,724] [INFO] [timer.py:197:stop] 0/138, RunningAvgSamplesPerSec=6.078143442074857, CurrSamplesPerSec=5.314022303909574, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:47:13,565] [INFO] [logging.py:68:log_dist] [Rank 0] step=70, skipped=3, lr=[9.130374013504131e-06], mom=[[0.9, 0.999]]\n",
"[2022-12-16 13:47:13,567] [INFO] [timer.py:197:stop] 0/140, RunningAvgSamplesPerSec=6.077773018304035, CurrSamplesPerSec=5.36396394293752, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:47:28,585] [INFO] [timer.py:197:stop] 0/142, RunningAvgSamplesPerSec=6.077639901148956, CurrSamplesPerSec=5.461940956484987, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:47:43,370] [INFO] [timer.py:197:stop] 0/144, RunningAvgSamplesPerSec=6.078145299833403, CurrSamplesPerSec=5.518752068230726, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:47:58,138] [INFO] [timer.py:197:stop] 0/146, RunningAvgSamplesPerSec=6.077895578341843, CurrSamplesPerSec=5.5512169228705694, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:48:13,249] [INFO] [timer.py:197:stop] 0/148, RunningAvgSamplesPerSec=6.077814599805705, CurrSamplesPerSec=5.491996533706707, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:48:27,903] [INFO] [timer.py:197:stop] 0/150, RunningAvgSamplesPerSec=6.076902008713945, CurrSamplesPerSec=5.332959519203826, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:48:42,061] [INFO] [timer.py:197:stop] 0/152, RunningAvgSamplesPerSec=6.0783920477415, CurrSamplesPerSec=5.55632004431884, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:48:56,535] [INFO] [timer.py:197:stop] 0/154, RunningAvgSamplesPerSec=6.078732911919996, CurrSamplesPerSec=5.458582053389676, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:49:11,153] [INFO] [timer.py:197:stop] 0/156, RunningAvgSamplesPerSec=6.079008773246385, CurrSamplesPerSec=5.543700002028015, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:49:25,910] [INFO] [timer.py:197:stop] 0/158, RunningAvgSamplesPerSec=6.077901196166508, CurrSamplesPerSec=5.497338259422809, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:49:41,537] [INFO] [logging.py:68:log_dist] [Rank 0] step=80, skipped=3, lr=[9.432453625862409e-06], mom=[[0.9, 0.999]]\n",
"[2022-12-16 13:49:41,538] [INFO] [timer.py:197:stop] 0/160, RunningAvgSamplesPerSec=6.076246622231216, CurrSamplesPerSec=5.371769954520086, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:49:56,401] [INFO] [timer.py:197:stop] 0/162, RunningAvgSamplesPerSec=6.076453900936628, CurrSamplesPerSec=5.506221269741678, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:50:10,825] [INFO] [timer.py:197:stop] 0/164, RunningAvgSamplesPerSec=6.077190405328247, CurrSamplesPerSec=5.540300881158864, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:50:26,051] [INFO] [timer.py:197:stop] 0/166, RunningAvgSamplesPerSec=6.077041030460407, CurrSamplesPerSec=5.509338737628304, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:50:40,658] [INFO] [timer.py:197:stop] 0/168, RunningAvgSamplesPerSec=6.077215911476895, CurrSamplesPerSec=5.465386561501869, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:50:55,458] [INFO] [timer.py:197:stop] 0/170, RunningAvgSamplesPerSec=6.078024759766047, CurrSamplesPerSec=5.50886816782787, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:51:10,316] [INFO] [timer.py:197:stop] 0/172, RunningAvgSamplesPerSec=6.077383165843686, CurrSamplesPerSec=5.523170094873224, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:51:25,012] [INFO] [timer.py:197:stop] 0/174, RunningAvgSamplesPerSec=6.077781559393984, CurrSamplesPerSec=5.523025319397122, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:51:39,852] [INFO] [timer.py:197:stop] 0/176, RunningAvgSamplesPerSec=6.078433625571553, CurrSamplesPerSec=5.568140074904409, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:51:54,462] [INFO] [timer.py:197:stop] 0/178, RunningAvgSamplesPerSec=6.077703528416714, CurrSamplesPerSec=5.485578492081232, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:52:10,224] [INFO] [logging.py:68:log_dist] [Rank 0] step=90, skipped=3, lr=[9.697596263093091e-06], mom=[[0.9, 0.999]]\n",
"[2022-12-16 13:52:10,225] [INFO] [timer.py:197:stop] 0/180, RunningAvgSamplesPerSec=6.078332169068046, CurrSamplesPerSec=5.557059195233783, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:52:24,902] [INFO] [timer.py:197:stop] 0/182, RunningAvgSamplesPerSec=6.077463728768749, CurrSamplesPerSec=5.458477050176381, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:52:40,291] [INFO] [timer.py:197:stop] 0/184, RunningAvgSamplesPerSec=6.076290749991055, CurrSamplesPerSec=5.492102156302423, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:52:54,947] [INFO] [timer.py:197:stop] 0/186, RunningAvgSamplesPerSec=6.07565767596194, CurrSamplesPerSec=5.388023243640082, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:53:09,877] [INFO] [timer.py:197:stop] 0/188, RunningAvgSamplesPerSec=6.075693478896997, CurrSamplesPerSec=5.5255764788423285, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:53:24,501] [INFO] [timer.py:197:stop] 0/190, RunningAvgSamplesPerSec=6.0754839341391245, CurrSamplesPerSec=5.539336641858779, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:53:38,944] [INFO] [timer.py:197:stop] 0/192, RunningAvgSamplesPerSec=6.075775207227371, CurrSamplesPerSec=5.469889160531412, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:53:53,465] [INFO] [timer.py:197:stop] 0/194, RunningAvgSamplesPerSec=6.075782725725061, CurrSamplesPerSec=5.488814031317932, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:54:08,605] [INFO] [timer.py:197:stop] 0/196, RunningAvgSamplesPerSec=6.075932541941936, CurrSamplesPerSec=5.513335374613696, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:54:23,298] [INFO] [timer.py:197:stop] 0/198, RunningAvgSamplesPerSec=6.076327542642147, CurrSamplesPerSec=5.506319307844521, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n",
"[2022-12-16 13:54:38,322] [INFO] [logging.py:68:log_dist] [Rank 0] step=100, skipped=3, lr=[9.933858671331224e-06], mom=[[0.9, 0.999]]\n",
"[2022-12-16 13:54:38,323] [INFO] [timer.py:197:stop] 0/200, RunningAvgSamplesPerSec=6.07605192544789, CurrSamplesPerSec=5.413820537564129, MemAllocated=3.0GB, MaxMemAllocated=19.53GB\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"***** Running Evaluation *****\n",
" Num examples: Unknown\n",
" Batch size = 8\n",
"Reading metadata...: 1704it [00:00, 13668.60it/s]\n",
"The following columns in the evaluation set don't have a corresponding argument in `WhisperForConditionalGeneration.forward` and have been ignored: sentence, audio, input_length. If sentence, audio, input_length are not expected by `WhisperForConditionalGeneration.forward`, you can safely ignore this message.\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"/home/ubuntu/.local/lib/python3.8/site-packages/transformers/generation/utils.py:1134: UserWarning: You have modified the pretrained model configuration to control generation. This is a deprecated strategy to control generation and will be removed soon, in a future version. Please use a generation configuration file (see https://huggingface.co/docs/transformers/main_classes/text_generation)\n",
" warnings.warn(\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"Generate config GenerationConfig {\n",
" \"begin_suppress_tokens\": [\n",
" 220,\n",
" 50257\n",
" ],\n",
" \"bos_token_id\": 50257,\n",
" \"decoder_start_token_id\": 50258,\n",
" \"eos_token_id\": 50257,\n",
" \"max_length\": 448,\n",
" \"pad_token_id\": 50257,\n",
" \"suppress_tokens\": [],\n",
" \"transformers_version\": \"4.26.0.dev0\",\n",
" \"use_cache\": false\n",
"}\n",
"\n",
"[... the GenerationConfig block above was emitted 33 additional times; repeated log output truncated for readability ...]\n"
]
}
],
"source": [
"trainer.train()"
]
},
{
"cell_type": "markdown",
"id": "810ced54-7187-4a06-b2fe-ba6dcca94dc3",
"metadata": {
"id": "810ced54-7187-4a06-b2fe-ba6dcca94dc3"
},
"source": [
"We can label our checkpoint with the `whisper-event` tag on push by setting the appropriate keyword arguments (kwargs):"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c704f91e-241b-48c9-b8e0-f0da396a9663",
"metadata": {
"id": "c704f91e-241b-48c9-b8e0-f0da396a9663"
},
"outputs": [],
"source": [
"# Metadata for the auto-generated model card pushed to the Hub.\n",
"kwargs = dict(\n",
"    dataset_tags=dataset_names,\n",
"    dataset=\"Common Voice 11.0, FB Voxpopuli, Google FLEURS\",  # human-readable name for the training data\n",
"    language=\"fi\",\n",
"    model_name=\"Whisper Large Fi - Sormunen Teemu\",  # human-readable name for the model\n",
"    finetuned_from=\"openai/whisper-large\",\n",
"    tasks=\"automatic-speech-recognition\",\n",
"    tags=\"whisper-event\",\n",
")"
]
},
{
"cell_type": "markdown",
"id": "090d676a-f944-4297-a938-a40eda0b2b68",
"metadata": {
"id": "090d676a-f944-4297-a938-a40eda0b2b68"
},
"source": [
"The training results can now be uploaded to the Hub. To do so, execute the `push_to_hub` command and save the preprocessor object we created:"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d7030622-caf7-4039-939b-6195cdaa2585",
"metadata": {
"id": "d7030622-caf7-4039-939b-6195cdaa2585"
},
"outputs": [],
"source": [
"trainer.push_to_hub(**kwargs)"
]
},
{
"cell_type": "markdown",
"id": "ca743fbd-602c-48d4-ba8d-a2fe60af64ba",
"metadata": {
"id": "ca743fbd-602c-48d4-ba8d-a2fe60af64ba"
},
"source": [
"## Closing Remarks"
]
},
{
"cell_type": "markdown",
"id": "7f737783-2870-4e35-aa11-86a42d7d997a",
"metadata": {
"id": "7f737783-2870-4e35-aa11-86a42d7d997a"
},
"source": [
"In this blog, we covered a step-by-step guide on fine-tuning Whisper for multilingual ASR \n",
"using 🤗 Datasets, Transformers and the Hugging Face Hub. For more details on the Whisper model, the Common Voice dataset and the theory behind fine-tuning, refer to the accompanying [blog post](https://huggingface.co/blog/fine-tune-whisper). If you're interested in fine-tuning other \n",
"Transformers models, both for English and multilingual ASR, be sure to check out the \n",
"examples scripts at [examples/pytorch/speech-recognition](https://github.com/huggingface/transformers/tree/main/examples/pytorch/speech-recognition)."
]
}
],
"metadata": {
"colab": {
"include_colab_link": true,
"provenance": []
},
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.10"
}
},
"nbformat": 4,
"nbformat_minor": 5
}