Per-column length statistics for a collection of Hugging Face datasets, one row per dataset/config/split/column combination. `min`, `max`, `mean`, `median`, and `std` describe the distribution of value lengths in the named column, `histogram` holds its binned form, and `partial` marks statistics computed on only part of a split. Schema of the statistics table itself:

| column | type | range |
|---|---|---|
| dataset | string | lengths 5–115 |
| config | string | lengths 1–162 |
| split | string | lengths 1–228 |
| num_examples | int64 | 3–341M |
| column_name | string | lengths 0–77.9k |
| null_count | int64 | 0–62.9M |
| null_proportion | float64 | 0–1 |
| min | int64 | 0–9.25M |
| max | int64 | 0–1.07B |
| mean | float64 | 0–90.4M |
| median | float64 | 0–80.1M |
| std | float64 | 0–130M |
| histogram | dict | |
| partial | bool | 2 classes |
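In the data rows, `histogram` pairs integer `bin_edges` with per-bin counts `hist`; `bin_edges` always carries one more entry than `hist`, with the final edge clamped to the observed maximum length. As a minimal sketch of how length statistics of this shape could be computed for one string column (assumptions: plain Python with NumPy; the helper name, rounding, and binning are illustrative, not the dataset viewer's actual implementation):

```python
import numpy as np

def string_length_stats(values, n_bins=10):
    """Per-column length statistics in roughly the shape used below.

    Illustrative sketch: NumPy's default binning returns float edges,
    whereas the table below uses integer edges with the last edge
    clamped to the observed maximum.
    """
    lengths = np.array([len(v) for v in values if v is not None])
    null_count = sum(v is None for v in values)
    hist, bin_edges = np.histogram(lengths, bins=n_bins)
    return {
        "null_count": null_count,
        "null_proportion": round(null_count / len(values), 5),
        "min": int(lengths.min()),
        "max": int(lengths.max()),
        "mean": round(float(lengths.mean()), 5),
        "median": float(np.median(lengths)),
        "std": round(float(lengths.std(ddof=1)), 5),
        "histogram": {"bin_edges": bin_edges.tolist(), "hist": hist.tolist()},
    }

print(string_length_stats(["short", "a longer string", None, "x" * 120]))
```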
The collected per-split statistics:

| dataset | config | split | num_examples | column_name | null_count | null_proportion | min | max | mean | median | std | histogram | partial |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| THUDM/LongBench | repobench-p | test | 500 | context | 0 | 0 | 4,442 | 208,864 | 43,583.2 | 37,740.5 | 29,880.03926 | `{ "bin_edges": [ 4442, 24885, 45328, 65771, 86214, 106657, 127100, 147543, 167986, 188429, 208864 ], "hist": [ 164, 140, 95, 60, 18, 11, 10, 1, 0, 1 ] }` | false |
| THUDM/LongBench | repobench-p | test | 500 | input | 0 | 0 | 474 | 19,234 | 4,488.134 | 4,450.5 | 2,292.30418 | `{ "bin_edges": [ 474, 2351, 4228, 6105, 7982, 9859, 11736, 13613, 15490, 17367, 19234 ], "hist": [ 103, 125, 165, 80, 18, 6, 1, 0, 1, 1 ] }` | false |
| THUDM/LongBench | trec | test | 200 | _id | 0 | 0 | 48 | 48 | 48 | 48 | 0 | `{ "bin_edges": [ 48, 48 ], "hist": [ 200 ] }` | false |
| THUDM/LongBench | trec | test | 200 | context | 0 | 0 | 8,329 | 50,212 | 29,948.42 | 29,984.5 | 12,400.3286 | `{ "bin_edges": [ 8329, 12518, 16707, 20896, 25085, 29274, 33463, 37652, 41841, 46030, 50212 ], "hist": [ 21, 19, 17, 18, 22, 17, 19, 21, 22, 24 ] }` | false |
| THUDM/LongBench | trec | test | 200 | input | 0 | 0 | 30 | 93 | 51.245 | 49.5 | 12.68382 | `{ "bin_edges": [ 30, 37, 44, 51, 58, 65, 72, 79, 86, 93, 93 ], "hist": [ 25, 35, 42, 47, 23, 15, 4, 6, 2, 1 ] }` | false |
| THUDM/LongBench | triviaqa | test | 200 | _id | 0 | 0 | 48 | 48 | 48 | 48 | 0 | `{ "bin_edges": [ 48, 48 ], "hist": [ 200 ] }` | false |
| THUDM/LongBench | triviaqa | test | 200 | context | 0 | 0 | 5,518 | 101,431 | 47,122.875 | 48,040 | 24,597.77103 | `{ "bin_edges": [ 5518, 15110, 24702, 34294, 43886, 53478, 63070, 72662, 82254, 91846, 101431 ], "hist": [ 27, 15, 24, 22, 32, 21, 21, 21, 15, 2 ] }` | false |
| THUDM/LongBench | triviaqa | test | 200 | input | 0 | 0 | 349 | 7,172 | 3,067.845 | 2,718.5 | 1,784.39271 | `{ "bin_edges": [ 349, 1032, 1715, 2398, 3081, 3764, 4447, 5130, 5813, 6496, 7172 ], "hist": [ 30, 25, 25, 33, 14, 14, 21, 26, 10, 2 ] }` | false |
| THUDM/LongBench | vcsum | test | 200 | _id | 0 | 0 | 48 | 48 | 48 | 48 | 0 | `{ "bin_edges": [ 48, 48 ], "hist": [ 200 ] }` | false |
| THUDM/LongBench | vcsum | test | 200 | context | 0 | 0 | 1,113 | 42,072 | 15,146.025 | 13,317 | 8,472.53509 | `{ "bin_edges": [ 1113, 5209, 9305, 13401, 17497, 21593, 25689, 29785, 33881, 37977, 42072 ], "hist": [ 22, 29, 50, 38, 22, 8, 19, 7, 2, 3 ] }` | false |
| redwoodresearch/text_properties | default | train | 5,313 | text | 0 | 0 | 1,308 | 6,891 | 2,802.63185 | 2,834 | 340.85947 | `{ "bin_edges": [ 1308, 1867, 2426, 2985, 3544, 4103, 4662, 5221, 5780, 6339, 6891 ], "hist": [ 41, 711, 2894, 1647, 18, 1, 0, 0, 0, 1 ] }` | false |
| redwoodresearch/text_properties | default | validation | 1,475 | text | 0 | 0 | 1,881 | 4,431 | 2,810.07186 | 2,827 | 313.08766 | `{ "bin_edges": [ 1881, 2137, 2393, 2649, 2905, 3161, 3417, 3673, 3929, 4185, 4431 ], "hist": [ 25, 126, 277, 471, 381, 177, 15, 1, 1, 1 ] }` | false |
| Yukang/Pile-subset | enron_emails | train | 517,401 | meta | 0 | 0 | 30 | 85 | 45.8094 | 46 | 5.22759 | `{ "bin_edges": [ 30, 36, 42, 48, 54, 60, 66, 72, 78, 84, 85 ], "hist": [ 4412, 106524, 212897, 168029, 20707, 2839, 1690, 274, 1, 28 ] }` | false |
| Yukang/Pile-subset | enron_emails | train | 517,401 | text | 0 | 0 | 398 | 2,011,957 | 2,746.77385 | 1,548 | 8,418.61666 | `{ "bin_edges": [ 398, 201554, 402710, 603866, 805022, 1006178, 1207334, 1408490, 1609646, 1810802, 2011957 ], "hist": [ 517355, 37, 2, 0, 1, 0, 1, 0, 4, 1 ] }` | false |
| Sanatbek/uzbek-kazakh-parallel-corpora | uz-kz | train | 133,877 | id | 0 | 0 | 1 | 6 | 5.17006 | 5 | 0.57052 | `{ "bin_edges": [ 1, 2, 3, 4, 5, 6, 6 ], "hist": [ 10, 90, 900, 9000, 90000, 33877 ] }` | false |
| allenai/scitldr | Abstract | validation | 619 | paper_id | 0 | 0 | 9 | 10 | 9.84814 | 10 | 0.35917 | `{ "bin_edges": [ 9, 10, 10 ], "hist": [ 94, 525 ] }` | false |
| allenai/scitldr | AIC | validation | 619 | paper_id | 0 | 0 | 9 | 10 | 9.84814 | 10 | 0.35917 | `{ "bin_edges": [ 9, 10, 10 ], "hist": [ 94, 525 ] }` | false |
| allenai/scitldr | Abstract | test | 618 | paper_id | 0 | 0 | 9 | 10 | 9.54531 | 10 | 0.49835 | `{ "bin_edges": [ 9, 10, 10 ], "hist": [ 281, 337 ] }` | false |
| allenai/scitldr | FullText | train | 1,992 | paper_id | 0 | 0 | 9 | 11 | 9.79719 | 10 | 0.40592 | `{ "bin_edges": [ 9, 10, 11, 11 ], "hist": [ 407, 1582, 3 ] }` | false |
| allenai/scitldr | FullText | test | 618 | paper_id | 0 | 0 | 9 | 10 | 9.54531 | 10 | 0.49835 | `{ "bin_edges": [ 9, 10, 10 ], "hist": [ 281, 337 ] }` | false |
| allenai/scitldr | FullText | validation | 619 | paper_id | 0 | 0 | 9 | 10 | 9.84814 | 10 | 0.35917 | `{ "bin_edges": [ 9, 10, 10 ], "hist": [ 94, 525 ] }` | false |
| allenai/scitldr | AIC | train | 1,992 | paper_id | 0 | 0 | 9 | 11 | 9.79719 | 10 | 0.40592 | `{ "bin_edges": [ 9, 10, 11, 11 ], "hist": [ 407, 1582, 3 ] }` | false |
| allenai/scitldr | AIC | test | 618 | paper_id | 0 | 0 | 9 | 10 | 9.54531 | 10 | 0.49835 | `{ "bin_edges": [ 9, 10, 10 ], "hist": [ 281, 337 ] }` | false |
| allenai/scitldr | Abstract | train | 1,992 | paper_id | 0 | 0 | 9 | 11 | 9.79719 | 10 | 0.40592 | `{ "bin_edges": [ 9, 10, 11, 11 ], "hist": [ 407, 1582, 3 ] }` | false |
| 0n1xus/codexglue | code-completion-token-py150 | validation | 5,000 | code | 0 | 0 | 9 | 205,005 | 5,409.8454 | 2,022.5 | 10,962.17583 | `{ "bin_edges": [ 9, 20509, 41009, 61509, 82009, 102509, 123009, 143509, 164009, 184509, 205005 ], "hist": [ 4743, 171, 47, 25, 5, 3, 2, 3, 0, 1 ] }` | false |
| 0n1xus/codexglue | code-completion-token-py150 | train | 95,000 | code | 0 | 0 | 9 | 255,900 | 4,641.87366 | 1,724 | 9,763.44752 | `{ "bin_edges": [ 9, 25599, 51189, 76779, 102369, 127959, 153549, 179139, 204729, 230319, 255900 ], "hist": [ 92200, 2049, 480, 134, 68, 26, 21, 13, 5, 4 ] }` | false |
| 0n1xus/codexglue | code-to-code-trans | test | 1,000 | cs_code | 0 | 0 | 32 | 1,129 | 227.989 | 179 | 156.77811 | `{ "bin_edges": [ 32, 142, 252, 362, 472, 582, 692, 802, 912, 1022, 1129 ], "hist": [ 416, 168, 228, 140, 20, 13, 7, 4, 3, 1 ] }` | false |
| 0n1xus/codexglue | code-to-code-trans | test | 1,000 | java_code | 0 | 0 | 30 | 1,061 | 176.375 | 154 | 135.25947 | `{ "bin_edges": [ 30, 134, 238, 342, 446, 550, 654, 758, 862, 966, 1061 ], "hist": [ 417, 404, 96, 35, 18, 13, 6, 5, 4, 2 ] }` | false |
| AI-Sweden/SuperLim | dalaj | train | 3,841 | corrected_indices | 0 | 0 | 3 | 7 | 5.35668 | 5 | 0.89637 | `{ "bin_edges": [ 3, 4, 5, 6, 7, 7 ], "hist": [ 136, 17, 2706, 305, 677 ] }` | false |
| AI-Sweden/SuperLim | dalaj | train | 3,841 | corrected_sentence | 0 | 0 | 12 | 423 | 121.42619 | 108 | 64.37408 | `{ "bin_edges": [ 12, 54, 96, 138, 180, 222, 264, 306, 348, 390, 423 ], "hist": [ 470, 1117, 973, 604, 365, 180, 89, 28, 7, 8 ] }` | false |
| AI-Sweden/SuperLim | dalaj | train | 3,841 | error_corr_pair | 0 | 0 | 4 | 56 | 14.36501 | 12 | 7.77955 | `{ "bin_edges": [ 4, 10, 16, 22, 28, 34, 40, 46, 52, 56 ], "hist": [ 1325, 1164, 711, 387, 158, 61, 24, 8, 3 ] }` | false |
| AI-Sweden/SuperLim | dalaj | train | 3,841 | error_indices | 0 | 0 | 3 | 7 | 5.35538 | 5 | 0.89267 | `{ "bin_edges": [ 3, 4, 5, 6, 7, 7 ], "hist": [ 130, 23, 2717, 294, 677 ] }` | false |
| AI-Sweden/SuperLim | dalaj | train | 3,841 | l1 | 0 | 0 | 4 | 43 | 10.5043 | 8 | 5.77472 | `{ "bin_edges": [ 4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 43 ], "hist": [ 660, 2203, 249, 469, 57, 179, 0, 3, 0, 21 ] }` | false |
| AI-Sweden/SuperLim | dalaj | train | 3,841 | original_sentence | 0 | 0 | 11 | 424 | 121.33611 | 109 | 64.34043 | `{ "bin_edges": [ 11, 53, 95, 137, 179, 221, 263, 305, 347, 389, 424 ], "hist": [ 454, 1104, 971, 633, 366, 184, 86, 28, 7, 8 ] }` | false |
| AI-Sweden/SuperLim | dalaj | test | 444 | corrected_indices | 0 | 0 | 3 | 7 | 5.32658 | 5 | 0.86 | `{ "bin_edges": [ 3, 4, 5, 6, 7, 7 ], "hist": [ 13, 4, 324, 31, 72 ] }` | false |
| AI-Sweden/SuperLim | dalaj | test | 444 | corrected_sentence | 0 | 0 | 13 | 341 | 120.84685 | 106 | 68.12135 | `{ "bin_edges": [ 13, 46, 79, 112, 145, 178, 211, 244, 277, 310, 341 ], "hist": [ 42, 87, 123, 67, 49, 28, 11, 16, 11, 10 ] }` | false |
| AI-Sweden/SuperLim | dalaj | test | 444 | error_corr_pair | 0 | 0 | 5 | 39 | 13.26126 | 11 | 6.86991 | `{ "bin_edges": [ 5, 9, 13, 17, 21, 25, 29, 33, 37, 39 ], "hist": [ 131, 116, 79, 53, 33, 13, 10, 8, 1 ] }` | false |
| AI-Sweden/SuperLim | dalaj | test | 444 | error_indices | 0 | 0 | 3 | 7 | 5.32432 | 5 | 0.85955 | `{ "bin_edges": [ 3, 4, 5, 6, 7, 7 ], "hist": [ 13, 4, 325, 30, 72 ] }` | false |
| AI-Sweden/SuperLim | dalaj | test | 444 | original_sentence | 0 | 0 | 12 | 342 | 120.6982 | 106 | 67.9245 | `{ "bin_edges": [ 12, 46, 80, 114, 148, 182, 216, 250, 284, 318, 342 ], "hist": [ 38, 96, 120, 77, 41, 29, 14, 14, 6, 9 ] }` | false |
| AI-Sweden/SuperLim | dalaj | validation | 445 | corrected_indices | 0 | 0 | 3 | 7 | 5.36404 | 5 | 0.88407 | `{ "bin_edges": [ 3, 4, 5, 6, 7, 7 ], "hist": [ 13, 3, 318, 31, 80 ] }` | false |
| AI-Sweden/SuperLim | dalaj | validation | 445 | corrected_sentence | 0 | 0 | 13 | 416 | 121.5236 | 110 | 57.17728 | `{ "bin_edges": [ 13, 54, 95, 136, 177, 218, 259, 300, 341, 382, 416 ], "hist": [ 26, 136, 140, 71, 40, 16, 13, 2, 0, 1 ] }` | false |
| AI-Sweden/SuperLim | dalaj | validation | 445 | error_corr_pair | 0 | 0 | 5 | 51 | 14.75281 | 12 | 8.04251 | `{ "bin_edges": [ 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 51 ], "hist": [ 149, 111, 74, 46, 39, 20, 4, 0, 1, 1 ] }` | false |
| AI-Sweden/SuperLim | dalaj | validation | 445 | error_indices | 0 | 0 | 3 | 7 | 5.35955 | 5 | 0.88845 | `{ "bin_edges": [ 3, 4, 5, 6, 7, 7 ], "hist": [ 14, 2, 319, 30, 80 ] }` | false |
| AI-Sweden/SuperLim | dalaj | validation | 445 | original_sentence | 0 | 0 | 15 | 421 | 121.29213 | 110 | 57.3339 | `{ "bin_edges": [ 15, 56, 97, 138, 179, 220, 261, 302, 343, 384, 421 ], "hist": [ 30, 139, 138, 70, 36, 16, 13, 2, 0, 1 ] }` | false |
| AI-Sweden/SuperLim | swesim_similarity | test | 1,360 | similarity | 0 | 0 | 3 | 4 | 3.00074 | 3 | 0.02712 | `{ "bin_edges": [ 3, 4, 4 ], "hist": [ 1359, 1 ] }` | false |
| AI-Sweden/SuperLim | swesim_similarity | test | 1,360 | word_1 | 0 | 0 | 1 | 16 | 5.86544 | 5 | 2.48 | `{ "bin_edges": [ 1, 3, 5, 7, 9, 11, 13, 15, 16 ], "hist": [ 33, 434, 468, 200, 153, 54, 12, 6 ] }` | false |
| AI-Sweden/SuperLim | swesim_similarity | test | 1,360 | word_2 | 0 | 0 | 1 | 15 | 6.19265 | 6 | 2.48432 | `{ "bin_edges": [ 1, 3, 5, 7, 9, 11, 13, 15, 15 ], "hist": [ 20, 376, 425, 286, 163, 75, 12, 3 ] }` | false |
| AI-Sweden/SuperLim | swewic | test | 1,000 | end_1 | 0 | 0 | 1 | 3 | 2.177 | 2 | 0.4959 | `{ "bin_edges": [ 1, 2, 3, 3 ], "hist": [ 50, 723, 227 ] }` | false |
| AI-Sweden/SuperLim | swewic | test | 1,000 | end_2 | 0 | 0 | 1 | 3 | 2.166 | 2 | 0.46331 | `{ "bin_edges": [ 1, 2, 3, 3 ], "hist": [ 38, 758, 204 ] }` | false |
| AI-Sweden/SuperLim | swewic | test | 1,000 | sentence_1 | 0 | 0 | 4 | 671 | 131.449 | 115 | 83.66924 | `{ "bin_edges": [ 4, 71, 138, 205, 272, 339, 406, 473, 540, 607, 671 ], "hist": [ 225, 395, 236, 85, 33, 14, 3, 5, 2, 2 ] }` | false |
| AI-Sweden/SuperLim | swewic | test | 1,000 | sentence_2 | 0 | 0 | 11 | 671 | 120.327 | 107 | 74.73341 | `{ "bin_edges": [ 11, 78, 145, 212, 279, 346, 413, 480, 547, 614, 671 ], "hist": [ 319, 411, 180, 55, 17, 8, 5, 2, 2, 1 ] }` | false |
| AI-Sweden/SuperLim | swewic | test | 1,000 | start_1 | 0 | 0 | 1 | 3 | 2.083 | 2 | 0.56607 | `{ "bin_edges": [ 1, 2, 3, 3 ], "hist": [ 122, 673, 205 ] }` | false |
| AI-Sweden/SuperLim | swewic | test | 1,000 | start_2 | 0 | 0 | 1 | 3 | 2.056 | 2 | 0.55962 | `{ "bin_edges": [ 1, 2, 3, 3 ], "hist": [ 130, 684, 186 ] }` | false |
| AI-Sweden/SuperLim | swewic | test | 1,000 | word_1 | 0 | 0 | 1 | 16 | 6.027 | 6 | 2.17785 | `{ "bin_edges": [ 1, 3, 5, 7, 9, 11, 13, 15, 16 ], "hist": [ 8, 243, 404, 216, 95, 21, 10, 3 ] }` | false |
| AI-Sweden/SuperLim | swewic | test | 1,000 | word_2 | 0 | 0 | 2 | 15 | 6.071 | 6 | 2.15328 | `{ "bin_edges": [ 2, 4, 6, 8, 10, 12, 14, 15 ], "hist": [ 95, 336, 354, 137, 61, 12, 5 ] }` | false |
| AI-Sweden/SuperLim | swediag | test | 1,104 | hypothesis | 0 | 0 | 12 | 316 | 99.50996 | 89.5 | 58.1533 | `{ "bin_edges": [ 12, 43, 74, 105, 136, 167, 198, 229, 260, 291, 316 ], "hist": [ 178, 267, 220, 172, 107, 82, 41, 23, 8, 6 ] }` | false |
| AI-Sweden/SuperLim | swediag | test | 1,104 | premise | 0 | 0 | 12 | 316 | 99.51087 | 89.5 | 58.13798 | `{ "bin_edges": [ 12, 43, 74, 105, 136, 167, 198, 229, 260, 291, 316 ], "hist": [ 177, 268, 220, 173, 106, 83, 40, 23, 8, 6 ] }` | false |
| AI-Sweden/SuperLim | swefaq | test | 513 | candidate_answer | 0 | 0 | 31 | 1,904 | 319.06238 | 263 | 235.40062 | `{ "bin_edges": [ 31, 219, 407, 595, 783, 971, 1159, 1347, 1535, 1723, 1904 ], "hist": [ 208, 175, 73, 31, 16, 6, 2, 1, 0, 1 ] }` | false |
| AI-Sweden/SuperLim | swefaq | test | 513 | correct_answer | 0 | 0 | 31 | 1,904 | 319.06238 | 263 | 235.40062 | `{ "bin_edges": [ 31, 219, 407, 595, 783, 971, 1159, 1347, 1535, 1723, 1904 ], "hist": [ 208, 175, 73, 31, 16, 6, 2, 1, 0, 1 ] }` | false |
| AI-Sweden/SuperLim | swefaq | test | 513 | question | 0 | 0 | 11 | 229 | 70.51462 | 63 | 32.389 | `{ "bin_edges": [ 11, 33, 55, 77, 99, 121, 143, 165, 187, 209, 229 ], "hist": [ 31, 154, 164, 80, 46, 20, 6, 8, 3, 1 ] }` | false |
| AI-Sweden/SuperLim | swefracas | test | 304 | premiss_1 | 0 | 0 | 17 | 115 | 44.71382 | 42 | 16.55814 | `{ "bin_edges": [ 17, 27, 37, 47, 57, 67, 77, 87, 97, 107, 115 ], "hist": [ 34, 52, 105, 54, 39, 6, 2, 4, 6, 2 ] }` | false |
| AI-Sweden/SuperLim | swefracas | test | 304 | premiss_2 | 0 | 0 | 3 | 81 | 16.04605 | 3 | 16.71538 | `{ "bin_edges": [ 3, 11, 19, 27, 35, 43, 51, 59, 67, 75, 81 ], "hist": [ 165, 21, 39, 29, 26, 9, 8, 5, 1, 1 ] }` | false |
| AI-Sweden/SuperLim | swefracas | test | 304 | question | 0 | 0 | 13 | 157 | 42.68092 | 38 | 19.87685 | `{ "bin_edges": [ 13, 28, 43, 58, 73, 88, 103, 118, 133, 148, 157 ], "hist": [ 52, 119, 85, 31, 9, 2, 1, 0, 4, 1 ] }` | false |
| AI-Sweden/SuperLim | swewsc | test | 275 | challenge_begin | 0 | 0 | 2 | 3 | 2.51273 | 3 | 0.50075 | `{ "bin_edges": [ 2, 3, 3 ], "hist": [ 134, 141 ] }` | false |
| AI-Sweden/SuperLim | swewsc | test | 275 | challenge_end | 0 | 0 | 2 | 3 | 2.55273 | 3 | 0.49812 | `{ "bin_edges": [ 2, 3, 3 ], "hist": [ 123, 152 ] }` | false |
| AI-Sweden/SuperLim | swewsc | test | 275 | passage | 0 | 0 | 48 | 306 | 165.09818 | 165 | 66.37189 | `{ "bin_edges": [ 48, 74, 100, 126, 152, 178, 204, 230, 256, 282, 306 ], "hist": [ 12, 43, 60, 11, 16, 60, 26, 19, 15, 13 ] }` | false |
| AI-Sweden/SuperLim | swewsc | test | 275 | response_begin | 0 | 0 | 1 | 3 | 1.93818 | 2 | 0.63346 | `{ "bin_edges": [ 1, 2, 3, 3 ], "hist": [ 64, 164, 47 ] }` | false |
| AI-Sweden/SuperLim | swewsc | test | 275 | response_end | 0 | 0 | 1 | 3 | 2.05818 | 2 | 0.61924 | `{ "bin_edges": [ 1, 2, 3, 3 ], "hist": [ 45, 169, 61 ] }` | false |
| AI-Sweden/SuperLim | swewsc | test | 275 | response_text | 0 | 0 | 3 | 67 | 10.34545 | 8 | 6.70009 | `{ "bin_edges": [ 3, 10, 17, 24, 31, 38, 45, 52, 59, 66, 67 ], "hist": [ 149, 90, 24, 10, 1, 0, 0, 0, 0, 1 ] }` | false |
| AI-Sweden/SuperLim | swepar | test | 165 | sentence_1 | 0 | 0 | 13 | 120 | 34.40606 | 32 | 15.37495 | `{ "bin_edges": [ 13, 24, 35, 46, 57, 68, 79, 90, 101, 112, 120 ], "hist": [ 41, 59, 38, 17, 4, 3, 2, 0, 0, 1 ] }` | false |
| AI-Sweden/SuperLim | swepar | test | 165 | sentence_2 | 0 | 0 | 13 | 83 | 34.74545 | 33 | 13.13516 | `{ "bin_edges": [ 13, 21, 29, 37, 45, 53, 61, 69, 77, 83 ], "hist": [ 15, 46, 37, 30, 21, 9, 4, 1, 2 ] }` | false |
| AI-Sweden/SuperLim | swepar | test | 165 | similarity_score | 0 | 0 | 3 | 6 | 3.1697 | 3 | 0.54799 | `{ "bin_edges": [ 3, 4, 5, 6, 6 ], "hist": [ 149, 5, 10, 1 ] }` | false |
| AI-Sweden/SuperLim | swesat | test | 822 | answer_1 | 0 | 0 | 4 | 65 | 12.81509 | 11 | 5.67576 | `{ "bin_edges": [ 4, 11, 18, 25, 32, 39, 46, 53, 60, 65 ], "hist": [ 300, 409, 90, 11, 6, 3, 0, 2, 1 ] }` | false |
| AI-Sweden/SuperLim | swesat | test | 822 | answer_2 | 0 | 0 | 5 | 64 | 12.96959 | 12 | 5.79094 | `{ "bin_edges": [ 5, 11, 17, 23, 29, 35, 41, 47, 53, 59, 64 ], "hist": [ 292, 377, 115, 24, 7, 1, 1, 1, 3, 1 ] }` | false |
| AI-Sweden/SuperLim | swesat | test | 822 | answer_3 | 0 | 0 | 5 | 56 | 13.46837 | 12 | 5.87224 | `{ "bin_edges": [ 5, 11, 17, 23, 29, 35, 41, 47, 53, 56 ], "hist": [ 274, 367, 123, 40, 9, 3, 3, 1, 2 ] }` | false |
| AI-Sweden/SuperLim | swesat | test | 822 | answer_4 | 0 | 0 | 5 | 53 | 13.30779 | 12 | 5.64814 | `{ "bin_edges": [ 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 53 ], "hist": [ 178, 403, 151, 60, 15, 8, 2, 1, 1, 3 ] }` | false |
| AI-Sweden/SuperLim | swesat | test | 822 | answer_5 | 0 | 0 | 5 | 66 | 13.39538 | 12 | 5.94273 | `{ "bin_edges": [ 5, 12, 19, 26, 33, 40, 47, 54, 61, 66 ], "hist": [ 380, 325, 91, 15, 4, 3, 2, 1, 1 ] }` | false |
| AI-Sweden/SuperLim | swesat | test | 822 | target_item | 0 | 0 | 3 | 28 | 8.58637 | 8 | 2.88584 | `{ "bin_edges": [ 3, 6, 9, 12, 15, 18, 21, 24, 27, 28 ], "hist": [ 101, 334, 280, 84, 13, 6, 3, 0, 1 ] }` | false |
| AI-Sweden/SuperLim | swesim_relatedness | test | 1,360 | relatedness | 0 | 0 | 3 | 4 | 3.06324 | 3 | 0.24348 | `{ "bin_edges": [ 3, 4, 4 ], "hist": [ 1274, 86 ] }` | false |
| AI-Sweden/SuperLim | swesim_relatedness | test | 1,360 | word_1 | 0 | 0 | 1 | 16 | 5.86544 | 5 | 2.48 | `{ "bin_edges": [ 1, 3, 5, 7, 9, 11, 13, 15, 16 ], "hist": [ 33, 434, 468, 200, 153, 54, 12, 6 ] }` | false |
| AI-Sweden/SuperLim | swesim_relatedness | test | 1,360 | word_2 | 0 | 0 | 1 | 15 | 6.19265 | 6 | 2.48432 | `{ "bin_edges": [ 1, 3, 5, 7, 9, 11, 13, 15, 15 ], "hist": [ 20, 376, 425, 286, 163, 75, 12, 3 ] }` | false |
| AI-Sweden/SuperLim | sweana | test | 20,638 | a | 0 | 0 | 2 | 12 | 5.73607 | 6 | 1.91762 | `{ "bin_edges": [ 2, 4, 6, 8, 10, 12, 12 ], "hist": [ 2459, 7465, 6773, 3145, 708, 88 ] }` | false |
| AI-Sweden/SuperLim | sweana | test | 20,638 | b | 0 | 0 | 3 | 15 | 6.90915 | 7 | 1.97801 | `{ "bin_edges": [ 3, 5, 7, 9, 11, 13, 15, 15 ], "hist": [ 2199, 6568, 7892, 2861, 1014, 61, 43 ] }` | false |
| AI-Sweden/SuperLim | sweana | test | 20,638 | c | 0 | 0 | 2 | 12 | 5.73602 | 6 | 1.91767 | `{ "bin_edges": [ 2, 4, 6, 8, 10, 12, 12 ], "hist": [ 2459, 7466, 6772, 3145, 708, 88 ] }` | false |
| AI-Sweden/SuperLim | sweana | test | 20,638 | d | 0 | 0 | 3 | 15 | 6.90915 | 7 | 1.97801 | `{ "bin_edges": [ 3, 5, 7, 9, 11, 13, 15, 15 ], "hist": [ 2199, 6568, 7892, 2861, 1014, 61, 43 ] }` | false |
| AI-Sweden/SuperLim | swewgr | test | 624 | challenge | 0 | 0 | 54 | 57 | 54.30128 | 54 | 0.75525 | `{ "bin_edges": [ 54, 55, 56, 57, 57 ], "hist": [ 512, 72, 4, 36 ] }` | false |
| AI-Sweden/SuperLim | swewgr | test | 624 | responses | 0 | 0 | 151 | 174 | 157.0625 | 156 | 4.32815 | `{ "bin_edges": [ 151, 154, 157, 160, 163, 166, 169, 172, 174 ], "hist": [ 135, 183, 153, 72, 60, 15, 0, 6 ] }` | false |
| AI-Sweden/SuperLim | swewgr | test | 624 | text | 0 | 0 | 47 | 131 | 82.91186 | 81 | 16.55324 | `{ "bin_edges": [ 47, 56, 65, 74, 83, 92, 101, 110, 119, 128, 131 ], "hist": [ 21, 56, 98, 166, 108, 76, 51, 26, 18, 4 ] }` | false |
| 0n1xus/codexglue | code-completion-token-py150 | test | 50,000 | code | 0 | 0 | 9 | 529,509 | 4,570.83654 | 1,706.5 | 9,571.26873 | `{ "bin_edges": [ 9, 52960, 105911, 158862, 211813, 264764, 317715, 370666, 423617, 476568, 529509 ], "hist": [ 49701, 245, 37, 15, 1, 0, 0, 0, 0, 1 ] }` | false |
| 0n1xus/codexglue | code-to-code-trans | validation | 499 | cs_code | 0 | 0 | 38 | 1,034 | 246.07615 | 213 | 168.09728 | `{ "bin_edges": [ 38, 138, 238, 338, 438, 538, 638, 738, 838, 938, 1034 ], "hist": [ 181, 85, 89, 95, 27, 7, 6, 5, 2, 2 ] }` | false |
| 0n1xus/codexglue | code-to-code-trans | validation | 499 | java_code | 0 | 0 | 31 | 1,066 | 190.60922 | 163 | 151.66813 | `{ "bin_edges": [ 31, 135, 239, 343, 447, 551, 655, 759, 863, 967, 1066 ], "hist": [ 185, 217, 47, 13, 18, 6, 4, 5, 3, 1 ] }` | false |
| Babelscape/rebel-dataset | REBEL | validation | 172,860 | context | 0 | 0 | 10 | 1,467 | 135.17453 | 123 | 62.60313 | `{ "bin_edges": [ 10, 156, 302, 448, 594, 740, 886, 1032, 1178, 1324, 1467 ], "hist": [ 122568, 46761, 3133, 320, 57, 7, 6, 4, 2, 2 ] }` | false |
| Babelscape/rebel-dataset | REBEL | validation | 172,860 | id | 0 | 0 | 5 | 13 | 10.14023 | 10 | 0.75552 | `{ "bin_edges": [ 5, 6, 7, 8, 9, 10, 11, 12, 13, 13 ], "hist": [ 4, 129, 531, 3527, 22074, 92813, 52073, 1702, 7 ] }` | false |
| Babelscape/rebel-dataset | REBEL | validation | 172,860 | title | 0 | 0 | 1 | 130 | 17.25084 | 15 | 8.8586 | `{ "bin_edges": [ 1, 14, 27, 40, 53, 66, 79, 92, 105, 118, 130 ], "hist": [ 66902, 84257, 16361, 4396, 795, 110, 25, 10, 3, 1 ] }` | false |
| Babelscape/rebel-dataset | REBEL | validation | 172,860 | triplets | 0 | 0 | 34 | 3,690 | 165.68691 | 110 | 158.3354 | `{ "bin_edges": [ 34, 400, 766, 1132, 1498, 1864, 2230, 2596, 2962, 3328, 3690 ], "hist": [ 159585, 10377, 2734, 104, 37, 14, 3, 5, 0, 1 ] }` | false |
| CodedotAI/code_clippy_github | C-all | train | 257,000 | code_text | 0 | 0 | 879 | 995,217 | 19,402.42781 | 9,712 | 33,156.10603 | `{ "bin_edges": [ 879, 100313, 199747, 299181, 398615, 498049, 597483, 696917, 796351, 895785, 995217 ], "hist": [ 251359, 4542, 645, 218, 94, 55, 17, 22, 23, 25 ] }` | true |
| CodedotAI/code_clippy_github | C-all | train | 257,000 | file_path | 0 | 0 | 4 | 176 | 30.67998 | 29 | 11.00656 | `{ "bin_edges": [ 4, 22, 40, 58, 76, 94, 112, 130, 148, 166, 176 ], "hist": [ 42891, 174603, 33998, 3772, 1097, 399, 174, 46, 17, 3 ] }` | true |
| CodedotAI/code_clippy_github | C-all | train | 257,000 | repo_name | 0 | 0 | 6 | 90 | 27.71249 | 27 | 9.51241 | `{ "bin_edges": [ 6, 15, 24, 33, 42, 51, 60, 69, 78, 87, 90 ], "hist": [ 19188, 77971, 76677, 62039, 18858, 2008, 257, 1, 0, 1 ] }` | true |
| 0n1xus/codexglue | code-to-code-trans | train | 10,295 | cs_code | 0 | 0 | 27 | 1,379 | 230.63856 | 183 | 162.32265 | `{ "bin_edges": [ 27, 163, 299, 435, 571, 707, 843, 979, 1115, 1251, 1379 ], "hist": [ 4716, 1809, 2993, 453, 167, 88, 37, 21, 7, 4 ] }` | false |
| 0n1xus/codexglue | code-to-code-trans | train | 10,295 | java_code | 0 | 0 | 28 | 1,397 | 179.63021 | 151 | 142.26297 | `{ "bin_edges": [ 28, 165, 302, 439, 576, 713, 850, 987, 1124, 1261, 1397 ], "hist": [ 5860, 3292, 558, 280, 155, 78, 41, 16, 12, 3 ] }` | false |
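Only the CodedotAI/code_clippy_github rows are flagged `partial` = true, and their round `num_examples` of 257,000 suggests statistics computed over a truncated slice of the split rather than the whole of it. Statistics of this kind can be re-fetched per split from the Hugging Face dataset viewer backend; a hedged sketch, assuming its public `/statistics` endpoint (field names in the live response may not match the column names used in this table exactly):

```python
import requests

# Assumption: the public datasets-server statistics endpoint; its response
# layout (a top-level "partial" flag plus a "statistics" list with one entry
# per column) may change over time.
API = "https://datasets-server.huggingface.co/statistics"

def split_statistics(dataset: str, config: str, split: str) -> dict:
    resp = requests.get(
        API,
        params={"dataset": dataset, "config": config, "split": split},
        timeout=30,
    )
    resp.raise_for_status()
    return resp.json()

stats = split_statistics("THUDM/LongBench", "repobench-p", "test")
print(stats.get("partial"))  # true when only part of the split was scanned
for col in stats.get("statistics", []):
    print(col.get("column_name"), col.get("column_statistics"))
```

One request covers a single (dataset, config, split) triple, so regenerating the whole table means iterating over every combination listed above.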