{ "alpha": 0.0, "attn_implementation": "flash_attention_2", "beta": 1.0, "bos_token_id": 128000, "do_sample": true, "eos_token_id": [ 128001, 128008, 128009 ], "gamma": 1.0, "kl_temperature": 1.0, "max_length": 131072, "pad_token_id": 128001, "temperature": 0.4, "top_p": 0.9, "transformers_version": "4.46.3" }