Ashish9879 committed • Commit 33b07d4 • Parent(s): e6e5094

Upload . with huggingface_hub
Files changed:
- config.json +1 -1
- generation_config.json +6 -0
- merges.txt +1 -1
- pytorch_model.bin +1 -1
- tokenizer.json +1 -0
- tokenizer_config.json +0 -1
config.json CHANGED
@@ -48,7 +48,7 @@
   "summary_type": "cls_index",
   "summary_use_proj": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.
+  "transformers_version": "4.27.4",
   "use_cache": true,
   "vocab_size": 50257,
   "window_size": 256
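The only change here is the transformers_version recorded in the model config (the old value is cut off in the rendered diff). A minimal sketch of reading the field back, where "path/to/checkpoint" is a placeholder for a local checkout of this repo, not a path from the commit:

```python
# Hedged sketch: inspect the library version recorded in config.json.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("path/to/checkpoint")  # placeholder path
print(config.transformers_version)  # "4.27.4" after this commit
```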
generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 50256,
+  "eos_token_id": 50256,
+  "transformers_version": "4.27.4"
+}
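Recent transformers releases split generation defaults out of config.json into this separate file; "_from_model_config": true marks it as auto-derived from the model config rather than hand-written. A minimal sketch of loading it, with "path/to/checkpoint" again a placeholder:

```python
# Hedged sketch: read the new generation defaults via GenerationConfig.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("path/to/checkpoint")
print(gen_config.bos_token_id, gen_config.eos_token_id)  # 50256 50256
```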
merges.txt CHANGED
@@ -1,4 +1,4 @@
-#version: 0.2
+#version: 0.2
 Ġ t
 Ġ a
 h e
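The header line is rewritten but renders identically, so the change is most likely invisible (trailing whitespace or line endings). The remaining lines are byte-level BPE merge rules in priority order, where Ġ encodes a leading space. An illustrative-only sketch of how such a merge list is applied; real GPT-2 tokenization also performs byte-level pre-tokenization, omitted here:

```python
# Illustrative sketch of greedy BPE merging; not the tokenizers library's code.
def apply_merges(symbols, merges):
    ranks = {pair: i for i, pair in enumerate(merges)}  # earlier rule = higher priority
    while len(symbols) > 1:
        # Rank every adjacent symbol pair; pick the best (lowest-ranked) merge.
        pairs = [(ranks.get((a, b), float("inf")), i)
                 for i, (a, b) in enumerate(zip(symbols, symbols[1:]))]
        rank, i = min(pairs)
        if rank == float("inf"):
            break  # no rule applies to any remaining pair
        symbols = symbols[:i] + [symbols[i] + symbols[i + 1]] + symbols[i + 2:]
    return symbols

merges = [("Ġ", "t"), ("Ġ", "a"), ("h", "e")]  # the first rules in this file
print(apply_merges(["Ġ", "t", "h", "e"], merges))  # ['Ġt', 'he']
```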
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:0b57fe66ac8352e975a3d075e25659671d332542b5203a71868382817482e9ff
 size 551182545
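The weights live in Git LFS storage; the repo only tracks this three-line pointer, so the diff swaps the content hash while the size stays the same (the old hash is truncated in the rendered diff). A minimal sketch of parsing the pointer format, assuming Python 3.9+:

```python
# Hedged sketch: parse a Git LFS pointer file like the one above.
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size": int(fields["size"]),  # size of the actual file in bytes
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:0b57fe66ac8352e975a3d075e25659671d332542b5203a71868382817482e9ff
size 551182545"""
print(parse_lfs_pointer(pointer)["size"])  # 551182545
```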
tokenizer.json CHANGED
@@ -39,6 +39,7 @@
   "continuing_subword_prefix": "",
   "end_of_word_suffix": "",
   "fuse_unk": false,
+  "byte_fallback": false,
   "vocab": {
     "!": 0,
     "\"": 1,
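The added byte_fallback flag is a field that newer versions of the tokenizers library serialize for BPE models; false keeps the existing behavior. A minimal sketch of loading the updated file directly, assuming "tokenizer.json" is the file from this repo:

```python
# Hedged sketch: load tokenizer.json with the standalone tokenizers library.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")
print(tok.encode("hello world").ids)  # token ids under the byte-level BPE vocab
```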
tokenizer_config.json CHANGED
@@ -19,7 +19,6 @@
   },
   "errors": "replace",
   "model_max_length": 2048,
-  "name_or_path": "EleutherAI/gpt-neo-125M",
   "pad_token": null,
   "special_tokens_map_file": null,
   "tokenizer_class": "GPT2Tokenizer",
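Dropping name_or_path removes the reference to the upstream EleutherAI/gpt-neo-125M checkpoint; loading should be unaffected, since the field is informational and transformers fills it in from whatever path the tokenizer is loaded from. A minimal sketch, with "path/to/checkpoint" a placeholder:

```python
# Hedged sketch: the tokenizer still loads without "name_or_path" in its config.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint")
print(tokenizer.model_max_length)  # 2048, per tokenizer_config.json
```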