{
  "attn_softmax_bf16": null,
  "bos_token_id": 1,
  "bucket_internal": null,
  "bucket_size": -1,
  "do_sample": true,
  "eos_token_id": 2,
  "flash_attention_recompute": null,
  "ignore_eos": null,
  "limit_hpu_graphs": null,
  "max_length": 4096,
  "pad_token_id": 0,
  "reduce_recompile": null,
  "reuse_cache": null,
  "static_shapes": null,
  "temperature": 0.6,
  "top_p": 0.9,
  "transformers_version": "4.38.2",
  "trim_logits": null,
  "use_flash_attention": null,
  "use_fused_rope": null
}