ACA committed
Commit eeef0c9
Parent: 112b215

Remove checkpoints

checkpoint-31500/config.json DELETED
@@ -1,38 +0,0 @@
-{
-  "_name_or_path": "microsoft/DialoGPT-small",
-  "activation_function": "gelu_new",
-  "architectures": [
-    "GPT2LMHeadModel"
-  ],
-  "attn_pdrop": 0.1,
-  "bos_token_id": 50256,
-  "embd_pdrop": 0.1,
-  "eos_token_id": 50256,
-  "initializer_range": 0.02,
-  "layer_norm_epsilon": 1e-05,
-  "model_type": "gpt2",
-  "n_ctx": 1024,
-  "n_embd": 768,
-  "n_head": 12,
-  "n_inner": null,
-  "n_layer": 12,
-  "n_positions": 1024,
-  "reorder_and_upcast_attn": false,
-  "resid_pdrop": 0.1,
-  "scale_attn_by_inverse_layer_idx": false,
-  "scale_attn_weights": true,
-  "summary_activation": null,
-  "summary_first_dropout": 0.1,
-  "summary_proj_to_labels": true,
-  "summary_type": "cls_index",
-  "summary_use_proj": true,
-  "task_specific_params": {
-    "conversational": {
-      "max_length": 1000
-    }
-  },
-  "torch_dtype": "float32",
-  "transformers_version": "4.12.5",
-  "use_cache": true,
-  "vocab_size": 50257
-}
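
Note: the removed config.json above matches the stock GPT-2 configuration of the base model named in "_name_or_path", so an equivalent config can be re-created from the Hub rather than from the deleted checkpoint. A minimal sketch, assuming the transformers library is installed and the Hub is reachable:

from transformers import AutoConfig

# Rebuild an equivalent configuration from the base model on the Hub.
config = AutoConfig.from_pretrained("microsoft/DialoGPT-small")
print(config.n_layer, config.n_head, config.n_embd)  # 12 12 768, matching the deleted file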
 
checkpoint-31500/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-31500/optimizer.pt DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:0f05638d102a09dfd44c9c6ddc71d7e58a3ae6ff8ed9f84b9c55b484329897be
-size 995604017
 
checkpoint-31500/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9e48fc439199d40fbff5c0962dcc007d853eb015913037d0c087cafa6706dc57
-size 510403817
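
Note: the three-line files removed above (optimizer.pt, pytorch_model.bin, and similar) are Git LFS pointer files rather than the weights themselves; per https://git-lfs.github.com/spec/v1 they contain a version line, an oid line, and a size line. A minimal parsing sketch, assuming a local checkout where such a pointer file still exists (the path in the usage comment is only illustrative):

def read_lfs_pointer(path):
    # Parse a Git LFS pointer file: each line is "<key> <value>".
    fields = {}
    with open(path) as fh:
        for line in fh:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

# Illustrative usage (hypothetical local path):
# ptr = read_lfs_pointer("checkpoint-31500/pytorch_model.bin")
# print(ptr["oid"], ptr["size"])  # -> sha256:9e48fc43..., 510403817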
 
checkpoint-31500/scheduler.pt DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:61354693843f0055ad1c3a570f7d4ad8f7743911b7694a093f452a3c7928462c
-size 623
 
checkpoint-31500/special_tokens_map.json DELETED
@@ -1 +0,0 @@
-{"bos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}}
 
checkpoint-31500/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-31500/tokenizer_config.json DELETED
@@ -1 +0,0 @@
-{"unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "bos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "add_prefix_space": false, "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "microsoft/DialoGPT-small", "errors": "replace", "tokenizer_class": "GPT2Tokenizer"}
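
Note: as with the config, the removed tokenizer files correspond to the stock GPT-2 tokenizer of the base model ("name_or_path": microsoft/DialoGPT-small, "tokenizer_class": GPT2Tokenizer), so they can be regenerated from the Hub. A minimal sketch, assuming transformers is installed:

from transformers import AutoTokenizer

# Rebuild the same tokenizer from the base model on the Hub.
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-small")
print(tokenizer.eos_token)  # "<|endoftext|>", matching the deleted special_tokens_map.json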
 
checkpoint-31500/training_args.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:526585760563eb256b197e4a774d047937fe6cede36bd2c029e42120980e77bc
-size 1327
 
checkpoint-31500/vocab.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-35000/config.json DELETED
@@ -1,38 +0,0 @@
-{
-  "_name_or_path": "microsoft/DialoGPT-small",
-  "activation_function": "gelu_new",
-  "architectures": [
-    "GPT2LMHeadModel"
-  ],
-  "attn_pdrop": 0.1,
-  "bos_token_id": 50256,
-  "embd_pdrop": 0.1,
-  "eos_token_id": 50256,
-  "initializer_range": 0.02,
-  "layer_norm_epsilon": 1e-05,
-  "model_type": "gpt2",
-  "n_ctx": 1024,
-  "n_embd": 768,
-  "n_head": 12,
-  "n_inner": null,
-  "n_layer": 12,
-  "n_positions": 1024,
-  "reorder_and_upcast_attn": false,
-  "resid_pdrop": 0.1,
-  "scale_attn_by_inverse_layer_idx": false,
-  "scale_attn_weights": true,
-  "summary_activation": null,
-  "summary_first_dropout": 0.1,
-  "summary_proj_to_labels": true,
-  "summary_type": "cls_index",
-  "summary_use_proj": true,
-  "task_specific_params": {
-    "conversational": {
-      "max_length": 1000
-    }
-  },
-  "torch_dtype": "float32",
-  "transformers_version": "4.12.5",
-  "use_cache": true,
-  "vocab_size": 50257
-}
 
checkpoint-35000/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-35000/optimizer.pt DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9317fdd60279a2abbe070e970df16af57b5cb105f09c16abe6bdc8bcbe04f2ca
-size 995604017
 
checkpoint-35000/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:1464606b4a09425dd95f4b4fa49efc2f8102b6fd9c42ce2fa4c0eaf8f5f8eae4
-size 510403817
 
checkpoint-35000/scheduler.pt DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:dbb70f6ac7ad43668dcc6b26390b723f7a51ddfc7984918c27e6703ffd4b9fb1
-size 623
 
checkpoint-35000/special_tokens_map.json DELETED
@@ -1 +0,0 @@
-{"bos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}}
 
checkpoint-35000/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-35000/tokenizer_config.json DELETED
@@ -1 +0,0 @@
-{"unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "bos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "add_prefix_space": false, "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "microsoft/DialoGPT-small", "errors": "replace", "tokenizer_class": "GPT2Tokenizer"}
 
checkpoint-35000/training_args.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:526585760563eb256b197e4a774d047937fe6cede36bd2c029e42120980e77bc
-size 1327
 
checkpoint-35000/vocab.json DELETED
The diff for this file is too large to render. See raw diff