candle-metavoice / first_stage.model_args.json
{"n_layer": 24, "n_head": 16, "n_embd": 2048, "block_size": 2048, "bias": false, "vocab_sizes": [2562], "dropout": 0.0, "causal": true, "norm_type": "rmsnorm", "rmsnorm_eps": 1e-05, "nonlinearity_type": "swiglu", "spk_emb_on_text": true, "attn_kernel_type": "torch_attn", "swiglu_multiple_of": 256, "spkemb_dropout": 0.1}