TopperThijs committed
Commit d73e2fd
1 Parent(s): d20795f

Upload GemmaForCausalLM

Files changed (2):
  1. config.json +2 -2
  2. generation_config.json +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "google/gemma-7b",
+  "_name_or_path": "TopperThijs/Gemma-7b-complete-8-15-BV",
   "architectures": [
     "GemmaForCausalLM"
   ],
@@ -38,7 +38,7 @@
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "torch_dtype": "float16",
-  "transformers_version": "4.42.3",
+  "transformers_version": "4.44.0",
   "use_cache": true,
   "vocab_size": 256002
 }
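
For reference, a minimal sketch of loading this checkpoint with the updated config. It assumes transformers >= 4.44.0 (the version pinned above) is installed and that the repo id TopperThijs/Gemma-7b-complete-8-15-BV is accessible; nothing here is specific to this commit beyond the values visible in config.json:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "TopperThijs/Gemma-7b-complete-8-15-BV"  # the _name_or_path set in this commit

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,  # matches "torch_dtype": "float16" in config.json
)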
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 2,
   "eos_token_id": 1,
   "pad_token_id": 0,
-  "transformers_version": "4.42.3"
+  "transformers_version": "4.44.0"
 }
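
generation_config.json carries the default special-token ids (bos 2, eos 1, pad 0) that generate() picks up automatically when the model is loaded. Continuing the sketch above, with a purely illustrative prompt:

inputs = tokenizer("Hello, Gemma!", return_tensors="pt")
# generate() reads bos/eos/pad ids from the model's generation_config,
# i.e. the values in generation_config.json above.
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))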