georgiyozhegov committed
Commit
cc6265c
1 Parent(s): 833fb7f

Upload LlamaForCausalLM

Files changed (4)
  1. README.md +3 -0
  2. config.json +7 -7
  3. generation_config.json +6 -0
  4. model.safetensors +2 -2
README.md CHANGED
@@ -4,6 +4,9 @@ datasets:
 library_name: transformers
 license: mit
 pipeline_tag: text-generation
+tags:
+- trl
+- sft
 ---
 
 Model-calculator.
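The new `trl` and `sft` tags are the card tags TRL writes when a model trained with its `SFTTrainer` is pushed to the Hub, which suggests this checkpoint came out of a TRL supervised fine-tuning run. A minimal sketch of that flow; the dataset and model/output names below are placeholders, not taken from this commit:

# Sketch of the TRL flow that produces these tags (placeholder names throughout).
from datasets import load_dataset
from trl import SFTConfig, SFTTrainer

train_dataset = load_dataset("trl-lib/Capybara", split="train")  # placeholder dataset

trainer = SFTTrainer(
    model="model",                          # placeholder base checkpoint
    args=SFTConfig(output_dir="model"),
    train_dataset=train_dataset,
)
trainer.train()
trainer.push_to_hub()  # writes the "trl" / "sft" tags into the README front matter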
config.json CHANGED
@@ -1,21 +1,21 @@
 {
-  "_name_or_path": "model",
   "architectures": [
-    "LlamaModel"
+    "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
+  "head_dim": 32,
   "hidden_act": "silu",
-  "hidden_size": 128,
+  "hidden_size": 256,
   "initializer_range": 0.02,
-  "intermediate_size": 256,
+  "intermediate_size": 1024,
   "max_position_embeddings": 256,
   "mlp_bias": false,
   "model_type": "llama",
   "num_attention_heads": 8,
-  "num_hidden_layers": 12,
+  "num_hidden_layers": 6,
   "num_key_value_heads": 8,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-06,
@@ -23,7 +23,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version": "4.44.2",
+  "transformers_version": "4.46.1",
   "use_cache": true,
-  "vocab_size": 1024
+  "vocab_size": 128
 }
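The substantive change here is the switch from the headless `LlamaModel` to `LlamaForCausalLM`, which adds the LM head needed for the text-generation pipeline, alongside a reshaped backbone: wider (`hidden_size` 128 → 256, `intermediate_size` 256 → 1024) but shallower (`num_hidden_layers` 12 → 6), with the vocabulary shrunk from 1024 to 128 tokens. A quick way to verify the new shape after pulling the repo; "georgiyozhegov/calculator" is a hypothetical repo id, since the actual one is not visible in this diff:

# Sketch: verify the post-commit config (hypothetical repo id).
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("georgiyozhegov/calculator")
assert config.architectures == ["LlamaForCausalLM"]
assert (config.hidden_size, config.num_hidden_layers, config.vocab_size) == (256, 6, 128)

model = AutoModelForCausalLM.from_pretrained("georgiyozhegov/calculator")
print(sum(p.numel() for p in model.parameters()))  # ~6.4M parameters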
generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.46.1"
+}
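This file is new in the commit; `"_from_model_config": true` marks it as auto-derived from config.json at save time, and it pins the same BOS/EOS ids. `model.generate()` picks these defaults up automatically, but they can also be inspected directly. A sketch, reusing the hypothetical repo id from above:

# Sketch: the generation defaults that generate() will use.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("georgiyozhegov/calculator")  # hypothetical repo id
print(gen_config.bos_token_id, gen_config.eos_token_id)  # 1 2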
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:efb2e517f61386cadc8e21a20a5e0c8cc95c75141d282dc9c5a6819af1601054
-size 8412488
+oid sha256:51e9b97561a8e2fbb11dc583cfa26f9932d460ef1f836ae66c6da805db0e9afa
+size 25447424
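The weight file grows from ~8.4 MB to ~25.4 MB, which is consistent with the reshaped backbone plus the untied LM head at float32. A back-of-the-envelope check using the standard Llama parameter count (a sketch, not an authoritative audit):

# Parameter count implied by the new config.json, stored as float32 (4 bytes each).
hidden, inter, layers, vocab = 256, 1024, 6, 128

embed   = vocab * hidden               # input embeddings
lm_head = vocab * hidden               # output head ("tie_word_embeddings": false)
attn    = 4 * hidden * hidden          # q, k, v, o projections (no biases)
mlp     = 3 * hidden * inter           # gate, up and down projections
norms   = 2 * hidden                   # the two RMSNorm weights per layer

params = embed + lm_head + layers * (attn + mlp + norms) + hidden  # + final norm
print(params, params * 4)              # 6360320 params, 25441280 bytes

25,441,280 bytes of weights against the 25,447,424-byte file; the small remainder is the safetensors header/metadata.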