eaalghamdi committed on
Commit
9a74992
1 Parent(s): 5a46a07

Upload BloomForCausalLM

Files changed (2)
  1. config.json +5 -5
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "bigscience/bloom-3b",
+  "_name_or_path": "bigscience/bloom-560m",
   "apply_residual_connection_post_layernorm": false,
   "architectures": [
     "BloomForCausalLM"
@@ -10,17 +10,17 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_dropout": 0.0,
-  "hidden_size": 2560,
+  "hidden_size": 1024,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
   "masked_softmax_fusion": true,
   "model_type": "bloom",
-  "n_head": 32,
+  "n_head": 16,
   "n_inner": null,
-  "n_layer": 30,
+  "n_layer": 24,
   "offset_alibi": 100,
   "pad_token_id": 3,
-  "pretraining_tp": 4,
+  "pretraining_tp": 1,
   "quantization_config": {
     "bnb_4bit_compute_dtype": "float32",
     "bnb_4bit_quant_type": "fp4",
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f94bc1439554be4c42b8dce293b78caa786ddcd07591aabe9f4d72a5c28d1499
-size 3648747617
+oid sha256:e4a31b64a6600f1f30371eb06223111d48aaa257b16acfa6c1bf5275fee0b683
+size 817455457
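
Note: the updated config.json carries a bitsandbytes 4-bit `quantization_config` (`bnb_4bit_quant_type: "fp4"`, `bnb_4bit_compute_dtype: "float32"`). Below is a minimal, hedged sketch of how such a checkpoint is typically loaded with `transformers`; the repo id is a hypothetical placeholder (this commit does not name the repository), and the exact loading path may differ for this upload.

```python
# Minimal sketch, not taken from this repo: load a BLOOM checkpoint whose
# config.json includes a bitsandbytes fp4 quantization_config like the one above.
# "your-username/your-bloom-560m-fp4" is a hypothetical repo id.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

quant_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="fp4",             # matches "bnb_4bit_quant_type" in config.json
    bnb_4bit_compute_dtype=torch.float32,  # matches "bnb_4bit_compute_dtype" in config.json
)

model = AutoModelForCausalLM.from_pretrained(
    "your-username/your-bloom-560m-fp4",   # hypothetical; replace with the actual repo id
    quantization_config=quant_config,
    device_map="auto",                     # requires the accelerate package
)
tokenizer = AutoTokenizer.from_pretrained("bigscience/bloom-560m")
```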