{ "attn_implementation": "flash_attention_2", "bos_token_id": 128000, "eos_token_id": [ 128001, 128008, 128009 ], "transformers_version": "4.45.2" }