pauhidalgoo committed on
Commit e521fc5
Parent(s): 84bd7cb

Upload LlamaForCausalLM

Files changed (3)
  1. config.json +4 -3
  2. generation_config.json +1 -1
  3. model.safetensors +2 -2
config.json CHANGED
@@ -1,12 +1,13 @@
 {
-  "_name_or_path": "/finetune/checkpoint-2526/",
+  "_name_or_path": "pauhidalgoo/cucafera",
   "architectures": [
-    "LlamaModel"
+    "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 65537,
   "eos_token_id": 65538,
+  "head_dim": 96,
   "hidden_act": "gelu",
   "hidden_size": 768,
   "initializer_range": 0.02,
@@ -24,7 +25,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.44.2",
+  "transformers_version": "4.45.1",
   "use_cache": true,
   "vocab_size": 65539
 }
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 65537,
   "eos_token_id": 65538,
   "pad_token_id": 65538,
-  "transformers_version": "4.44.2"
+  "transformers_version": "4.45.1"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b3af6f22356af7dde7c6d24dbb42bcf365239d099c93a3183c5e373ac50273b8
-size 980119624
+oid sha256:96b598aa41895c6de7cb371604cc277b26a5a007c52b77511ecec9014d750f2e
+size 980121256
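
The switch in "architectures" from "LlamaModel" to "LlamaForCausalLM" means the checkpoint now ships with the language-modeling head, so it can be loaded directly for generation. A minimal loading sketch, assuming the repo id "pauhidalgoo/cucafera" (taken from the updated "_name_or_path"), that the tokenizer files live in the same repo, and an arbitrary prompt:

# Minimal sketch: load the re-uploaded checkpoint for generation.
# repo_id and tokenizer location are assumptions based on the
# updated config.json, not confirmed by the commit itself.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "pauhidalgoo/cucafera"

tokenizer = AutoTokenizer.from_pretrained(repo_id)
# AutoModelForCausalLM resolves to LlamaForCausalLM via the
# "architectures" field in the updated config.json.
model = AutoModelForCausalLM.from_pretrained(repo_id)

inputs = tokenizer("Hola,", return_tensors="pt")
# bos/eos/pad token ids (65537/65538/65538) are supplied by
# generation_config.json, so generate() picks them up automatically.
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

With the previous "LlamaModel" config, from_pretrained would have returned a headless backbone (hidden states only); the new config is what makes generate() work out of the box.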