{ "_name_or_path": "mistralai/Mistral-7B-v0.1", "architectures": [ "ZettHypernet" ], "attention_dropout": 0.0, "auto_map": { "AutoConfig": "configuration_hypernet.ZettHypernetConfig", "AutoModel": "modeling_hypernet.ZettHypernet" }, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "hn_add_inter_token_attention": false, "hn_concat_last_hidden_state": false, "hn_embed_lang_id": false, "hn_embed_target_priors": false, "hn_embed_using_source_embeddings": true, "hn_hidden_size": 4096, "hn_inter_token_attention_bias_by_priors": true, "hn_inter_token_attention_bias_scaler": 1.0, "hn_intermediate_size": 8192, "hn_language_adapter_bottleneck_dim": 0, "hn_model_name_or_path": "roberta-base", "hn_model_type": "roberta", "hn_n_extra_tokens": 522, "hn_n_inter_token_blocks": 16, "hn_n_layers": 3, "hn_num_attention_heads": 32, "hn_predict_bias": true, "hn_rescale_embeddings": true, "hn_single_head": false, "hn_surface_maxlen": 7, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "n_embd": 4096, "n_langs": 7, "name": "v7:mistral7b_en+code:lw=0.5_long", "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "original_vocab_size": 32000, "pad_token_id": 2, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "separate_out_embeddings": true, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "transformers_version": "4.39.0.dev0", "use_cache": true, "use_unigram_bias": true, "vocab_size": 32896 }