Muennighoff committed
Commit d02d958 • Parent(s): c6f3560
Better model with bs=1024
Browse files
- README.md +1 -1
- config.json +1 -1
- config_sentence_transformers.json +2 -2
- eval/SGPT-125M-weightedmean-nli-bitfit_weightedmean_layer-1_results_average_precision.json +7 -0
- eval/SGPT-125M-weightedmean-nli-bitfit_weightedmean_layer-1_results_detailed.json +66 -0
- eval/quora.json +1 -0
- eval/similarity_evaluation_sts-dev_results.csv +11 -11
- pytorch_model.bin +2 -2
- similarity_evaluation_sts-test_results.csv +0 -2
- tokenizer.json +0 -0
- tokenizer_config.json +1 -1
README.md
CHANGED
@@ -14,7 +14,7 @@ For usage instructions, refer to our codebase: https://github.com/Muennighoff/sg
 
 ## Evaluation Results
 
-For eval results, refer to our paper: https://arxiv.org/abs/2202.08904
+For eval results, refer to the eval folder or our paper: https://arxiv.org/abs/2202.08904
 
 ## Training
 The model was trained with the parameters:
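For a quick way to try the updated checkpoint, here is a minimal sketch using the standard sentence-transformers API. The Hub id Muennighoff/SGPT-125M-weightedmean-nli-bitfit is assumed from the eval file names in this commit; the canonical usage instructions remain those in the codebase linked above.

```python
# Minimal sketch (assumed Hub id; see the SGPT codebase linked in the README for canonical usage).
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Muennighoff/SGPT-125M-weightedmean-nli-bitfit")

sentences = ["How do I mount a USB drive?", "Mounting external storage on Ubuntu"]
embeddings = model.encode(sentences)  # weighted-mean pooling over GPT-Neo-125M token states
print(embeddings.shape)               # (2, 768) for the 125M GPT-Neo backbone
```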
config.json
CHANGED
@@ -47,7 +47,7 @@
   "summary_type": "cls_index",
   "summary_use_proj": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.
+  "transformers_version": "4.20.0.dev0",
   "use_cache": true,
   "vocab_size": 50257,
   "window_size": 256
config_sentence_transformers.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "__version__": {
     "sentence_transformers": "2.1.0",
-    "transformers": "4.
-    "pytorch": "1.10.
+    "transformers": "4.20.0.dev0",
+    "pytorch": "1.10.2"
   }
 }
eval/SGPT-125M-weightedmean-nli-bitfit_weightedmean_layer-1_results_average_precision.json
ADDED
@@ -0,0 +1,7 @@
+{
+    "askubuntu": 55.66,
+    "cqadupstack": 11.13,
+    "twitterpara": 69.63,
+    "scidocs": 68.01,
+    "avg": 51.1075
+}
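The avg field matches the unweighted mean of the four per-dataset scores; a one-liner to reproduce it:

```python
# Reproduce the "avg" value from the per-dataset scores in the file above.
scores = {"askubuntu": 55.66, "cqadupstack": 11.13, "twitterpara": 69.63, "scidocs": 68.01}
print(sum(scores.values()) / len(scores))  # 51.1075
```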
eval/SGPT-125M-weightedmean-nli-bitfit_weightedmean_layer-1_results_detailed.json
ADDED
@@ -0,0 +1,66 @@
+{
+    "askubuntu": {
+        "map_askubuntu_title": 55.66,
+        "p@1_askubuntu_title": 51.61,
+        "p@5_askubuntu_title": 41.4,
+        "mrr_askubuntu_title": 66.68
+    },
+    "cqadupstack": {
+        "map@100_cqadupstack_unix": 10.86,
+        "ndcg@10_cqadupstack_unix": 11.65,
+        "map@100_cqadupstack_gaming": 20.48,
+        "ndcg@10_cqadupstack_gaming": 22.59,
+        "map@100_cqadupstack_wordpress": 4.11,
+        "ndcg@10_cqadupstack_wordpress": 4.52,
+        "map@100_cqadupstack_stats": 13.42,
+        "ndcg@10_cqadupstack_stats": 13.69,
+        "map@100_cqadupstack_tex": 7.05,
+        "ndcg@10_cqadupstack_tex": 7.4,
+        "map@100_cqadupstack_english": 8.84,
+        "ndcg@10_cqadupstack_english": 10.03,
+        "map@100_cqadupstack_programmers": 10.39,
+        "ndcg@10_cqadupstack_programmers": 10.95,
+        "map@100_cqadupstack_mathematica": 9.81,
+        "ndcg@10_cqadupstack_mathematica": 10.84,
+        "map@100_cqadupstack_physics": 13.33,
+        "ndcg@10_cqadupstack_physics": 13.56,
+        "map@100_cqadupstack_gis": 13.79,
+        "ndcg@10_cqadupstack_gis": 14.32,
+        "map@100_cqadupstack_webmasters": 8.58,
+        "ndcg@10_cqadupstack_webmasters": 9.35,
+        "map@100_cqadupstack_android": 12.86,
+        "ndcg@10_cqadupstack_android": 14.72,
+        "map@100_cqadupstack_avg": 11.13,
+        "ndcg@10_cqadupstack_avg": 11.97
+    },
+    "twitterpara": {
+        "ap_twitter_twitterurl": 72.79,
+        "spearman_twitter_twitterurl": 67.64,
+        "ap_twitter_pit": 66.47,
+        "spearman_twitter_pit": 47.88,
+        "ap_twitter_avg": 69.63,
+        "spearman_twitter_avg": 57.76
+    },
+    "scidocs": {
+        "map_scidocs_cite_euclidean": 65.13,
+        "ndcg_scidocs_cite_euclidean": 82.16,
+        "map_scidocs_cite_cosine": 65.13,
+        "ndcg_scidocs_cite_cosine": 82.16,
+        "map_scidocs_cocite_euclidean": 67.76,
+        "ndcg_scidocs_cocite_euclidean": 83.77,
+        "map_scidocs_cocite_cosine": 67.76,
+        "ndcg_scidocs_cocite_cosine": 83.77,
+        "map_scidocs_coview_euclidean": 70.51,
+        "ndcg_scidocs_coview_euclidean": 84.5,
+        "map_scidocs_coview_cosine": 70.51,
+        "ndcg_scidocs_coview_cosine": 84.5,
+        "map_scidocs_coread_euclidean": 68.62,
+        "ndcg_scidocs_coread_euclidean": 83.66,
+        "map_scidocs_coread_cosine": 68.62,
+        "ndcg_scidocs_coread_cosine": 83.66,
+        "map_scidocs_euclidean_avg": 68.01,
+        "ndcg_scidocs_euclidean_avg": 83.52,
+        "map_scidocs_cosine_avg": 68.01,
+        "ndcg_scidocs_cosine_avg": 83.52
+    }
+}
eval/quora.json
ADDED
@@ -0,0 +1 @@
+{"SGPT-125M-weightedmean-nli-bitfit": {"quora": {"NDCG@1": 0.7097, "NDCG@3": 0.75264, "NDCG@5": 0.77096, "NDCG@10": 0.78967, "NDCG@100": 0.81262, "NDCG@1000": 0.81682}}}
eval/similarity_evaluation_sts-dev_results.csv
CHANGED
@@ -1,12 +1,12 @@
 epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,
-0,-1,0.
+0,440,0.815931462953464,0.8200562029402142,0.8157369182150072,0.819603973090759,0.8184615924110381,0.823007690742192,0.7061069454916115,0.7112807915742094
+0,880,0.8214075065169842,0.8255189362474712,0.8192073152764299,0.8236822552646557,0.8215825733824851,0.8264104866848039,0.7174042848871002,0.7192644874753024
+0,1320,0.8281480125216174,0.8335677824006662,0.8218868633996832,0.8269437148107974,0.8243453247825011,0.8296731348438655,0.7260606000892722,0.7296375462352843
+0,1760,0.8288444853328012,0.8336538155269189,0.8228072115324837,0.8279187688644755,0.8240960132575046,0.8296670547684952,0.7297739366603081,0.7327912957426814
+0,2200,0.8322285781163465,0.8363460131143705,0.8238595252888139,0.8285953867691392,0.8251293855185525,0.8303417242732376,0.7321514272161418,0.7343360548158535
+0,2640,0.8316032894200165,0.8360248029282871,0.8229876867952128,0.8274986778272352,0.8247887992125305,0.829730972962072,0.735861995734252,0.7394614515913788
+0,3080,0.8307132415525218,0.8357857422650881,0.821973667735502,0.8268832848991191,0.8232670133914585,0.8287163832939213,0.7351494187227637,0.7373781263102374
+0,3520,0.8321450278222416,0.837515610006931,0.8227498428419581,0.8276296120812954,0.824194673798708,0.8294918695942172,0.7350862168960725,0.7385545256985955
+0,3960,0.8324321317037187,0.8372050831022416,0.8235292210527604,0.8280796389286461,0.8251002162736456,0.8299533503397363,0.7325730558165168,0.7364960367335265
+0,4400,0.832333153640081,0.8370257982810063,0.8227920398931506,0.8274093432460116,0.8243812864875318,0.829381817916846,0.7344786676900655,0.7383103767201893
+0,-1,0.8323134689045238,0.8369988089385374,0.8227832000988474,0.8274006510691946,0.8243732697450359,0.8293464655792817,0.7344279740920635,0.7383348227000012
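To compare checkpoints across training steps, the dev CSV can be inspected directly; a small sketch, assuming pandas is available and the file is read from the repo's eval folder:

```python
# Sketch: find the evaluation step with the best dev cosine Spearman in the CSV above.
import pandas as pd

df = pd.read_csv("eval/similarity_evaluation_sts-dev_results.csv")
best = df.loc[df["cosine_spearman"].idxmax()]
print(int(best["steps"]), round(best["cosine_spearman"], 4))  # 3520 0.8375 in this run
```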
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size 
+oid sha256:6f48a127abdc03ef0217e1305a1ef41759c398b23a9bc8d51e9e5dc9bca3dae5
+size 551182929
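Since pytorch_model.bin is stored as a Git LFS pointer, a locally fetched copy can be checked against the oid and size recorded above; a small sketch, assuming the weights were pulled (e.g. with git lfs pull) into the working directory:

```python
# Sketch: verify a downloaded pytorch_model.bin against the LFS pointer's size and sha256 oid.
import hashlib
import os

path = "pytorch_model.bin"    # local copy of the LFS object
print(os.path.getsize(path))  # expect 551182929

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)
print(sha.hexdigest())  # expect 6f48a127abdc03ef0217e1305a1ef41759c398b23a9bc8d51e9e5dc9bca3dae5
```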
similarity_evaluation_sts-test_results.csv
DELETED
@@ -1,2 +0,0 @@
-epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
--1,-1,0.7846537302609198,0.7857648092636241,0.7732666799261162,0.7690356065641517,0.7726690278606694,0.7693750768516694,0.5922788515539513,0.5743748488122472
tokenizer.json
CHANGED
The diff for this file is too large to render. See raw diff.
tokenizer_config.json
CHANGED
@@ -1 +1 @@
-{"unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "bos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "add_prefix_space": false, "errors": "replace", "model_max_length": 2048, "special_tokens_map_file": null, "name_or_path": "EleutherAI/gpt-neo-125M", "tokenizer_class": "GPT2Tokenizer"}
+{"unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "bos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "add_prefix_space": false, "errors": "replace", "model_max_length": 2048, "special_tokens_map_file": null, "name_or_path": "EleutherAI/gpt-neo-125M", "pad_token": null, "add_bos_token": false, "tokenizer_class": "GPT2Tokenizer"}
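The new fields make explicit that this GPT-2-style tokenizer has no pad token and does not prepend a BOS token. A small sketch of how that surfaces when loading the tokenizer (the Hub id is assumed, as above):

```python
# Sketch: the updated config loads with pad_token unset, so set one before batched padding.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Muennighoff/SGPT-125M-weightedmean-nli-bitfit")
print(tok.pad_token)            # None, per "pad_token": null
print(tok.model_max_length)     # 2048
tok.pad_token = tok.eos_token   # common workaround for GPT-style tokenizers when padding batches
```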