{ "_name_or_path": "openai-community/gpt2", "activation_function": "gelu_new", "architectures": [ "GPT2ForSequenceClassification" ], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "id2label": {"0": "af", "1": "am", "2": "ar", "3": "as", "4": "az", "5": "ba", "6": "be", "7": "bg", "8": "bn", "9": "ca", "10": "ceb", "11": "ckb", "12": "cs", "13": "cy", "14": "da", "15": "de", "16": "dv", "17": "el", "18": "en", "19": "eo", "20": "es", "21": "et", "22": "eu", "23": "fa", "24": "fi", "25": "fr", "26": "fy", "27": "ga", "28": "gd", "29": "gl", "30": "gu", "31": "he", "32": "hi", "33": "hr", "34": "hu", "35": "hy", "36": "id", "37": "is", "38": "it", "39": "ja", "40": "ka", "41": "kk", "42": "kn", "43": "ku", "44": "ky", "45": "la", "46": "lb", "47": "lt", "48": "lv", "49": "mg", "50": "mk", "51": "ml", "52": "mn", "53": "mr", "54": "mt", "55": "my", "56": "nds", "57": "ne", "58": "nl", "59": "nn", "60": "no", "61": "or", "62": "pa", "63": "pl", "64": "ps", "65": "pt", "66": "ro", "67": "ru", "68": "sah", "69": "sd", "70": "si", "71": "sk", "72": "sl", "73": "sq", "74": "sr", "75": "sv", "76": "sw", "77": "ta", "78": "te", "79": "tg", "80": "th", "81": "tk", "82": "tl", "83": "tr", "84": "tt", "85": "ug", "86": "uk", "87": "ur", "88": "vi", "89": "yi"}, "initializer_range": 0.02, "label2id": { "af": 0, "am": 1, "ar": 2, "as": 3, "az": 4, "ba": 5, "be": 6, "bg": 7, "bn": 8, "ca": 9, "ceb": 10, "ckb": 11, "cs": 12, "cy": 13, "da": 14, "de": 15, "dv": 16, "el": 17, "en": 18, "eo": 19, "es": 20, "et": 21, "eu": 22, "fa": 23, "fi": 24, "fr": 25, "fy": 26, "ga": 27, "gd": 28, "gl": 29, "gu": 30, "he": 31, "hi": 32, "hr": 33, "hu": 34, "hy": 35, "id": 36, "is": 37, "it": 38, "ja": 39, "ka": 40, "kk": 41, "kn": 42, "ku": 43, "ky": 44, "la": 45, "lb": 46, "lt": 47, "lv": 48, "mg": 49, "mk": 50, "ml": 51, "mn": 52, "mr": 53, "mt": 54, "my": 55, "nds": 56, "ne": 57, "nl": 58, "nn": 59, "no": 60, "or": 61, "pa": 62, "pl": 63, "ps": 64, "pt": 65, "ro": 66, "ru": 67, "sah": 68, "sd": 69, "si": 70, "sk": 71, "sl": 72, "sq": 73, "sr": 74, "sv": 75, "sw": 76, "ta": 77, "te": 78, "tg": 79, "th": 80, "tk": 81, "tl": 82, "tr": 83, "tt": 84, "ug": 85, "uk": 86, "ur": 87, "vi": 88, "yi": 89 }, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "n_positions": 1024, "pad_token_id": 50256, "problem_type": "single_label_classification", "reorder_and_upcast_attn": false, "resid_pdrop": 0.1, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": { "text-generation": { "do_sample": true, "max_length": 50 } }, "torch_dtype": "float32", "transformers_version": "4.36.2", "use_cache": true, "vocab_size": 50257 }