cfierro committed
Commit
770af03
1 Parent(s): ad4ee5a

Add llama2-chat predictions

.gitattributes CHANGED
@@ -139,3 +139,11 @@ fm_queries_v2/llama2_3--meta-llama-Llama-2-7b-hf/raw_predictions.json filter=lfs
  fm_queries_v2/llama2_4--meta-llama-Llama-2-7b-hf/predictions.json filter=lfs diff=lfs merge=lfs -text
  fm_queries_v2/llama2_4--meta-llama-Llama-2-7b-hf/raw_predictions.json filter=lfs diff=lfs merge=lfs -text
  FT_updates/llama2-7b_test_layer1_lr5e-4.json filter=lfs diff=lfs merge=lfs -text
+ fm_queries_v2/llama2-chat-7b_0--meta-llama-Llama-2-7b-chat-hf/predictions.json filter=lfs diff=lfs merge=lfs -text
+ fm_queries_v2/llama2-chat-7b_0--meta-llama-Llama-2-7b-chat-hf/raw_predictions.json filter=lfs diff=lfs merge=lfs -text
+ fm_queries_v2/llama2-chat-7b_1--meta-llama-Llama-2-7b-chat-hf/predictions.json filter=lfs diff=lfs merge=lfs -text
+ fm_queries_v2/llama2-chat-7b_1--meta-llama-Llama-2-7b-chat-hf/raw_predictions.json filter=lfs diff=lfs merge=lfs -text
+ fm_queries_v2/llama2-chat-7b_2--meta-llama-Llama-2-7b-chat-hf/predictions.json filter=lfs diff=lfs merge=lfs -text
+ fm_queries_v2/llama2-chat-7b_2--meta-llama-Llama-2-7b-chat-hf/raw_predictions.json filter=lfs diff=lfs merge=lfs -text
+ fm_queries_v2/llama2-chat-7b_3--meta-llama-Llama-2-7b-chat-hf/predictions.json filter=lfs diff=lfs merge=lfs -text
+ fm_queries_v2/llama2-chat-7b_3--meta-llama-Llama-2-7b-chat-hf/raw_predictions.json filter=lfs diff=lfs merge=lfs -text
fm_queries_v2/llama2-chat-7b_0--meta-llama-Llama-2-7b-chat-hf/args.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "queries_path": "data/fm_queries_31Oct/fm_queries_0.txt",
+ "template": "query_in_response",
+ "instruction": "Complete the fact in as few words as possible",
+ "output_dir": "./outputs/fm_queries",
+ "exp_name": "llama2-chat-7b_0",
+ "model_name_or_path": "meta-llama/Llama-2-7b-chat-hf",
+ "cache_dir": null
+ }
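Each run directory pairs its predictions with an args.json recording the generation settings: the query shard, the prompt template and instruction, and the chat checkpoint. The inference script itself is not part of this commit, so the Python below is only a minimal sketch of how these settings could drive generation with the transformers API; the queries-file format and the [INST] prompt built for the "query_in_response" template are assumptions, not taken from the repository.

# Minimal sketch, NOT the author's script (which is not included in this commit).
# It only illustrates how the recorded args could be consumed with transformers.
import json

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

with open("fm_queries_v2/llama2-chat-7b_0--meta-llama-Llama-2-7b-chat-hf/args.json") as f:
    args = json.load(f)

tokenizer = AutoTokenizer.from_pretrained(args["model_name_or_path"], cache_dir=args["cache_dir"])
model = AutoModelForCausalLM.from_pretrained(
    args["model_name_or_path"],
    cache_dir=args["cache_dir"],
    torch_dtype=torch.float16,
    device_map="auto",
)

# Assumption: the shard referenced by queries_path holds one query per line.
with open(args["queries_path"]) as f:
    queries = [line.strip() for line in f if line.strip()]

for query in queries[:3]:
    # Assumption: Llama-2 chat prompt built from the instruction in args.json.
    prompt = f"[INST] {args['instruction']}: {query} [/INST]"
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    output = model.generate(**inputs, max_new_tokens=32, do_sample=False)
    completion = output[0][inputs["input_ids"].shape[1]:]
    print(tokenizer.decode(completion, skip_special_tokens=True))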
fm_queries_v2/llama2-chat-7b_0--meta-llama-Llama-2-7b-chat-hf/predictions.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b73fa50af5bb3f78d49e056c5628becfc29b00fd1868f29288713e4bb5e44b1
+ size 16004935
fm_queries_v2/llama2-chat-7b_0--meta-llama-Llama-2-7b-chat-hf/raw_predictions.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:37a5736295c7fc98df095560464033af62f3852f589696cf8de8aaa54ef227b8
+ size 22322918
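The predictions.json and raw_predictions.json files are checked in as Git LFS pointers, so the three lines in each diff above (version, oid, size) are metadata only; the actual JSON payloads (roughly 16 MB and 22 MB for this shard) live in LFS storage. A minimal sketch of fetching the resolved file through huggingface_hub follows; the repository id and repo_type are placeholders, since neither appears in this commit view.

# Sketch only: resolve the LFS pointer to the real predictions file via the Hub.
import json

from huggingface_hub import hf_hub_download

REPO_ID = "<user>/<dataset-name>"  # placeholder -- the repo id is not shown in this commit

path = hf_hub_download(
    repo_id=REPO_ID,
    repo_type="dataset",  # assumption: the files live in a dataset repository
    filename="fm_queries_v2/llama2-chat-7b_0--meta-llama-Llama-2-7b-chat-hf/predictions.json",
)

with open(path) as f:
    predictions = json.load(f)
print(type(predictions))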
fm_queries_v2/llama2-chat-7b_1--meta-llama-Llama-2-7b-chat-hf/args.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "queries_path": "data/fm_queries_31Oct/fm_queries_1.txt",
+ "template": "query_in_response",
+ "instruction": "Complete the fact in as few words as possible",
+ "output_dir": "./outputs/fm_queries",
+ "exp_name": "llama2-chat-7b_1",
+ "model_name_or_path": "meta-llama/Llama-2-7b-chat-hf",
+ "cache_dir": null
+ }
fm_queries_v2/llama2-chat-7b_1--meta-llama-Llama-2-7b-chat-hf/predictions.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2da8892b164bbb7449178dde2789d9b64f6ee419428b4e4da4989897274d1925
+ size 16257436
fm_queries_v2/llama2-chat-7b_1--meta-llama-Llama-2-7b-chat-hf/raw_predictions.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:44ec08987cc14bdc5b55637c4b5c3e3881a1210c15af5c879e4db5bbae5e2aa1
+ size 22718056
fm_queries_v2/llama2-chat-7b_2--meta-llama-Llama-2-7b-chat-hf/args.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "queries_path": "data/fm_queries_31Oct/fm_queries_2.txt",
+ "template": "query_in_response",
+ "instruction": "Complete the fact in as few words as possible",
+ "output_dir": "./outputs/fm_queries",
+ "exp_name": "llama2-chat-7b_2",
+ "model_name_or_path": "meta-llama/Llama-2-7b-chat-hf",
+ "cache_dir": null
+ }
fm_queries_v2/llama2-chat-7b_2--meta-llama-Llama-2-7b-chat-hf/predictions.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:17564f9ee7d95cbbc6a8cd3b501746ddbb40a581b6457afb09506203a0516a22
+ size 17370994
fm_queries_v2/llama2-chat-7b_2--meta-llama-Llama-2-7b-chat-hf/raw_predictions.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:535f5a4b23833689b6edccd98abc7dc06e6504fa1a3b019300933a16dac18e2f
+ size 23486505
fm_queries_v2/llama2-chat-7b_3--meta-llama-Llama-2-7b-chat-hf/args.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "queries_path": "data/fm_queries_31Oct/fm_queries_3.txt",
+ "template": "query_in_response",
+ "instruction": "Complete the fact in as few words as possible",
+ "output_dir": "./outputs/fm_queries",
+ "exp_name": "llama2-chat-7b_3",
+ "model_name_or_path": "meta-llama/Llama-2-7b-chat-hf",
+ "cache_dir": null
+ }
fm_queries_v2/llama2-chat-7b_3--meta-llama-Llama-2-7b-chat-hf/predictions.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d6060c3fdd269d77fd0bd86f9e811757a7ac75f8cc034f9b343c1319cfc5c36
+ size 16730852
fm_queries_v2/llama2-chat-7b_3--meta-llama-Llama-2-7b-chat-hf/raw_predictions.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89b55e1ce3d41af41364d88dfa5d2241cf835ec4bcefb7d9a51431ced7147724
+ size 23490781