{ "attn_implementation": "flash_attention_2", "bos_token_id": 151643, "eos_token_id": [ 151645, 151643 ], "pad_token_id": 151643, "transformers_version": "4.51.0", "use_cache": false }