adding bloom 1.7b model
- config.json +4 -3
- model.safetensors +3 -0
- pytorch_model.bin +3 -0
- special_tokens_map.json +1 -0
- tokenizer_config.json +1 -0
config.json CHANGED
@@ -1,10 +1,10 @@
 {
   "apply_residual_connection_post_layernorm": false,
   "attention_dropout": 0.0,
+  "attention_softmax_in_fp32": true,
   "architectures": [
     "BloomForCausalLM"
   ],
-  "attention_softmax_in_fp32": true,
   "bias_dropout_fusion": true,
   "bos_token_id": 1,
   "eos_token_id": 2,
@@ -15,12 +15,13 @@
   "layer_norm_epsilon": 1e-05,
   "masked_softmax_fusion": true,
   "model_type": "bloom",
-  "n_embed":
+  "n_embed": 2048,
   "n_inner": null,
   "n_layer": 24,
   "num_attention_heads": 16,
   "offset_alibi": 100,
-  "pretraining_tp":
+  "pretraining_tp": 2,
+  "seq_length": 4096,
   "skip_bias_add": true,
   "skip_bias_add_qkv": false,
   "transformers_version": "4.20.0",
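The updated config moves attention_softmax_in_fp32 into key order and fills in the model-size fields: 2048-dimensional embeddings, 24 layers, 16 attention heads, and a 4096-token pretraining sequence length. A minimal sketch of inspecting these fields after download, assuming the repo id is bigscience/bloom-1b7 (the id is not stated in this commit view):

```python
# Minimal sketch: load the config from the hub and check the fields
# touched by this commit. The repo id is an assumption.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("bigscience/bloom-1b7")

print(config.model_type)   # "bloom"
print(config.n_layer)      # 24
print(config.hidden_size)  # 2048 -- BloomConfig maps the legacy "n_embed" key here
print(getattr(config, "seq_length", None))  # 4096, kept as an extra attribute
```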
model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:145ae4b66381746c9438c63b6deb22a34d97000bba633bb6672fff2d1dcaf924
+size 3444848602
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bea744d6fd9662e8333ce2f6601c4014634227091e4572b43ba06b699c79e13e
+size 3444919671
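Both weight files are git-lfs pointers; the resolved blobs are about 3.4 GB each, consistent with roughly 1.7 B parameters at 2 bytes per parameter. When both formats are present, recent transformers releases load model.safetensors in preference to pytorch_model.bin. A minimal loading sketch, again assuming the bigscience/bloom-1b7 repo id:

```python
# Minimal sketch: load the checkpoint added in this commit.
# Repo id is an assumption; torch_dtype=float16 matches the on-disk
# size (~2 bytes/param x ~1.7B params ~= 3.4 GB).
import torch
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    "bigscience/bloom-1b7",
    torch_dtype=torch.float16,
)
n_params = sum(p.numel() for p in model.parameters())
print(f"{n_params / 1e9:.2f}B parameters")  # ~1.7B
```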
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>"}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+{"unk_token": "<unk>", "eos_token": "</s>", "bos_token": "<s>", "pad_token": "<pad>", "name_or_path": "bigscience/tokenizer", "special_tokens_map_file": null, "tokenizer_class": "BloomTokenizerFast", "padding_side":"left"}
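The tokenizer config points at the shared bigscience/tokenizer and pins padding_side to "left", which is what batched generation with a decoder-only model needs: pad tokens sit before the prompt rather than between the prompt and its continuation. A minimal sketch, with the same assumed repo id:

```python
# Minimal sketch: load the tokenizer shipped in this commit and
# confirm the left-padding behavior. Repo id is an assumption.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("bigscience/bloom-1b7")
print(tok.__class__.__name__)  # BloomTokenizerFast
print(tok.padding_side)        # "left"

# Pad tokens land on the left of the shorter sequence.
batch = tok(["Hello", "A much longer prompt"], padding=True, return_tensors="pt")
print(batch["input_ids"])
```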