vivek2001123 committed
Commit 465b261
Parent: 35049ab

Delete config.json

Files changed (1)
  1. config.json +0 -47
config.json DELETED
@@ -1,47 +0,0 @@
-{
-  "_name_or_path": "gpt2",
-  "activation_function": "gelu_new",
-  "adapters": {
-    "adapters": {
-      "finance": "pfeiffer"
-    },
-    "config_map": {},
-    "fusion_config_map": {},
-    "fusions": {}
-  },
-  "architectures": [
-    "GPT2LMHeadModel"
-  ],
-  "attn_pdrop": 0.1,
-  "bos_token_id": 50256,
-  "embd_pdrop": 0.1,
-  "eos_token_id": 50256,
-  "initializer_range": 0.02,
-  "layer_norm_epsilon": 1e-05,
-  "model_type": "gpt2",
-  "n_ctx": 1024,
-  "n_embd": 768,
-  "n_head": 12,
-  "n_inner": null,
-  "n_layer": 12,
-  "n_positions": 1024,
-  "reorder_and_upcast_attn": false,
-  "resid_pdrop": 0.1,
-  "scale_attn_by_inverse_layer_idx": false,
-  "scale_attn_weights": true,
-  "summary_activation": null,
-  "summary_first_dropout": 0.1,
-  "summary_proj_to_labels": true,
-  "summary_type": "cls_index",
-  "summary_use_proj": true,
-  "task_specific_params": {
-    "text-generation": {
-      "do_sample": true,
-      "max_length": 50
-    }
-  },
-  "torch_dtype": "float32",
-  "transformers_version": "4.26.1",
-  "use_cache": true,
-  "vocab_size": 50257
-}
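
For reference, an "adapters" block like the one in the removed config ({"finance": "pfeiffer"} under a GPT-2 base) is the kind of entry the adapter-transformers fork of transformers writes into config.json when an adapter is added and the model is saved. A minimal sketch of how such a config would typically be produced, assuming adapter-transformers is installed and using a hypothetical output directory ./gpt2-finance (not taken from this repo):

# Sketch only; assumes the adapter-transformers fork of `transformers`.
from transformers import GPT2LMHeadModel

# Load the base model referenced by "_name_or_path" in the config.
model = GPT2LMHeadModel.from_pretrained("gpt2")

# Add a Pfeiffer-style bottleneck adapter named "finance" and make it active.
# Saving afterwards records {"finance": "pfeiffer"} in config.json's "adapters" map.
model.add_adapter("finance", config="pfeiffer")
model.set_active_adapters("finance")

# Hypothetical output path; writes config.json alongside the weights.
model.save_pretrained("./gpt2-finance")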