---
# Model configuration for Codestral (GGUF, llama.cpp engine).
# Section markers (# BEGIN/# END) delimit the metadata, inference-parameter,
# and model-load-parameter stanzas consumed by the model loader.

# BEGIN GENERAL GGUF METADATA
id: codestral
model: codestral
name: codestral
version: 2
# END GENERAL GGUF METADATA

# BEGIN INFERENCE PARAMETERS
# BEGIN REQUIRED
stop:
  - "[/INST]"
# END REQUIRED
# BEGIN OPTIONAL
dynatemp_exponent: 1
dynatemp_range: 0
frequency_penalty: 0
ignore_eos: false
max_tokens: 4096
min_keep: 0
min_p: 0.05
mirostat: false
# 0.1 is the llama.cpp default learning rate; the previous value 0.100000001
# was a float32 round-trip artifact of the same setting.
mirostat_eta: 0.1
mirostat_tau: 5
n_probs: 0
penalize_nl: false
presence_penalty: 0
repeat_last_n: 64
repeat_penalty: 1
seed: -1
stream: true
temperature: 0.7
tfs_z: 1
top_k: 40
top_p: 0.9
typ_p: 1
# END OPTIONAL
# END INFERENCE PARAMETERS

# BEGIN MODEL LOAD PARAMETERS
# BEGIN REQUIRED
ctx_len: 4096
engine: llama-cpp
ngl: 57
prompt_template: "[INST] {system_message}\n\n{prompt}[/INST]"
# END REQUIRED
# END MODEL LOAD PARAMETERS