{ "_name_or_path": "boun-tabi-LMG/TURNA", "architectures": [ "T5ForClassification" ], "classifier_dropout": 0.0, "d_ff": 2816, "d_kv": 64, "d_model": 1024, "decoder_start_token_id": 0, "dense_act_fn": "gelu_new", "dropout_prob": 0.1, "dropout_rate": 0.1, "eos_token_id": 1, "feed_forward_proj": "gated-gelu", "id2label": { "0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2", "3": "LABEL_3", "4": "LABEL_4" }, "initializer_factor": 1.0, "is_encoder_decoder": true, "is_gated_act": true, "label2id": { "LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2, "LABEL_3": 3, "LABEL_4": 4 }, "layer_norm_epsilon": 1e-06, "model_type": "t5", "num_decoder_layers": 36, "num_heads": 16, "num_layers": 36, "output_past": true, "pad_token_id": 0, "problem_type": "single_label_classification", "relative_attention_max_distance": 128, "relative_attention_num_buckets": 32, "tie_word_embeddings": false, "torch_dtype": "float32", "transformers_version": "4.38.2", "use_cache": true, "vocab_size": 32128 }