grg commited on
Commit
e83464c
1 Parent(s): 348bacd

New models: Ministral and models from BeaverAI.

Browse files
Files changed (27) hide show
  1. static/leaderboard.csv +40 -35
  2. static/models_data/Cydonia-22B-v1.2/cfa_metrics.csv +10 -0
  3. static/models_data/Cydonia-22B-v1.2/matrix.svg +1974 -0
  4. static/models_data/Cydonia-22B-v1.2/model_detail.html +4 -0
  5. static/models_data/Cydonia-22B-v1.2/ranks.svg +0 -0
  6. static/models_data/Cydonia-22B-v1.2/structure.svg +0 -0
  7. static/models_data/Ministral-8B-Instruct-2410/cfa_metrics.csv +10 -0
  8. static/models_data/Ministral-8B-Instruct-2410/matrix.svg +1965 -0
  9. static/models_data/Ministral-8B-Instruct-2410/model_detail.html +6 -0
  10. static/models_data/Ministral-8B-Instruct-2410/ranks.svg +0 -0
  11. static/models_data/Ministral-8B-Instruct-2410/structure.svg +0 -0
  12. static/models_data/Ministrations-8B-v1/cfa_metrics.csv +10 -0
  13. static/models_data/Ministrations-8B-v1/matrix.svg +1946 -0
  14. static/models_data/Ministrations-8B-v1/model_detail.html +6 -0
  15. static/models_data/Ministrations-8B-v1/ranks.svg +0 -0
  16. static/models_data/Ministrations-8B-v1/structure.svg +0 -0
  17. static/models_data/Nautilus-70B-v0.1/cfa_metrics.csv +10 -0
  18. static/models_data/Nautilus-70B-v0.1/matrix.svg +1948 -0
  19. static/models_data/Nautilus-70B-v0.1/model_detail.html +6 -0
  20. static/models_data/Nautilus-70B-v0.1/ranks.svg +0 -0
  21. static/models_data/Nautilus-70B-v0.1/structure.svg +0 -0
  22. static/models_data/cardinal.svg +713 -515
  23. static/models_data/hermes_3_llama_3.1_70b_instruct/cfa_metrics.csv +10 -0
  24. static/models_data/hermes_3_llama_3.1_70b_instruct/matrix.svg +1951 -0
  25. static/models_data/hermes_3_llama_3.1_70b_instruct/ranks.svg +0 -0
  26. static/models_data/hermes_3_llama_3.1_70b_instruct/structure.svg +0 -0
  27. static/models_data/ordinal.svg +834 -636
static/leaderboard.csv CHANGED
@@ -1,36 +1,41 @@
1
  Model,Ordinal (Win rate),Cardinal (Score),RO Stability,Stress,CFI,SRMR,RMSEA
2
- llama-3.1-nemotron-70B-instruct,0.8696895424836603,0.751782963334874,0.7174031652092134,0.16209339860230643,0.7561694444444446,0.21189444444444439,0.23753055555555547
3
- hermes_3_llama_3.1_8b,0.4534313725490196,0.4117221943281449,0.16520527634373441,0.25311021741644446,0.5822944444444444,0.3526722222222223,0.3435555555555555
4
- gemma-2-2b-it,0.3635620915032679,0.3309858600428668,0.14746606707946294,0.263080165752695,0.40932500000000005,0.550475,0.5377472222222222
5
- gemma-2-9b-it,0.7287581699346405,0.6020857503693501,0.43782539244147833,0.20116278903333318,0.7543666666666667,0.23989444444444452,0.24792499999999995
6
- gemma-2-27b-it,0.6225490196078431,0.5270946699366518,0.3917304045417486,0.2058170364515589,0.5997861111111111,0.37121111111111116,0.37292222222222227
7
- phi-3-mini-128k-instruct,0.31862745098039214,0.32984992817164005,0.039299993295009855,0.281800547806919,0.5861361111111111,0.42524166666666674,0.3974944444444444
8
- phi-3-medium-128k-instruct,0.3198529411764706,0.30802986933853177,0.09692037989916814,0.2651981204439735,0.43025555555555556,0.5503277777777777,0.5381722222222222
9
- phi-3.5-mini-instruct,0.23651960784313728,0.2680653144619754,0.0361229186530762,0.28422749224983457,0.40715555555555555,0.5721138888888888,0.5507833333333333
10
- phi-3.5-MoE-instruct,0.38480392156862747,0.36128192067041315,0.10985291697837646,0.2739229692168671,0.5530944444444444,0.4248777777777778,0.40345
11
- Mistral-7B-Instruct-v0.1,0.2128267973856209,0.26609566354811315,0.027216280472015988,0.2829498135031582,0.38917777777777773,0.5561138888888888,0.530213888888889
12
- Mistral-7B-Instruct-v0.2,0.3415032679738562,0.32133832899241477,0.14417876497818388,0.265188983528973,0.3802722222222222,0.5727305555555555,0.5483611111111111
13
- Mistral-7B-Instruct-v0.3,0.25,0.26572479479146804,0.07960539866974455,0.2742399030139009,0.31385,0.6241,0.6081333333333333
14
- Mixtral-8x7B-Instruct-v0.1,0.4334150326797386,0.3819009850972602,0.21473356319081474,0.2624402608740656,0.45275,0.5034666666666667,0.4905694444444444
15
- Mixtral-8x22B-Instruct-v0.1,0.3349673202614379,0.31529864972153404,0.1414001940345544,0.2548838005881672,0.3772361111111111,0.5810888888888889,0.5844750000000001
16
- command_r_plus,0.5755718954248366,0.4995356672762356,0.3429686514651868,0.23811982320641845,0.6033000000000001,0.3740166666666668,0.3667527777777777
17
- llama_3_8b_instruct,0.48815359477124187,0.4295836112681494,0.24527785038654715,0.245806400289881,0.5498222222222222,0.42656388888888896,0.42189444444444446
18
- llama_3_70b_instruct,0.770016339869281,0.6839540364836003,0.607020698814379,0.18525883672204868,0.7210055555555557,0.2346083333333333,0.25758888888888887
19
- llama_3.1_8b_instruct,0.5637254901960785,0.4786874422110324,0.4295080949846363,0.22060228669473025,0.4305722222222223,0.5455027777777777,0.553
20
- llama_3.1_70b_instruct,0.8112745098039216,0.7172545013390067,0.691365862744007,0.1709718847084183,0.6979472222222223,0.2636777777777777,0.2907250000000001
21
- llama_3.1_405b_instruct_4bit,0.7283496732026143,0.6490864350383405,0.7232098126552619,0.1702199925365422,0.4875722222222223,0.4963444444444445,0.5211555555555556
22
- llama_3.2_1b_instruct,0.2107843137254902,0.2522036562381785,0.027192115495770382,0.29255310096654275,0.37450000000000006,0.5990222222222223,0.5740638888888888
23
- llama_3.2_3b_instruct,0.38929738562091504,0.3615804465210719,0.13450325180647235,0.27485276839064654,0.5017,0.44956666666666667,0.4226500000000001
24
- Qwen2-7B-Instruct,0.4035947712418301,0.36370005127542027,0.25108519506513916,0.25776537005719313,0.3560861111111111,0.6009722222222222,0.5920888888888889
25
- Qwen2-72B-Instruct,0.5690359477124183,0.5461212335522644,0.6465993243020925,0.20297742879025626,0.3045,0.6543138888888889,0.6646361111111111
26
- Qwen2.5-0.5B-Instruct,0.2822712418300653,0.3005554090516966,0.002970456550606876,0.2928913315666324,0.5371250000000001,0.44709722222222226,0.404575
27
- Qwen2.5-7B-Instruct,0.6070261437908496,0.5163098181421168,0.333554494486959,0.2505866550331236,0.6473694444444444,0.30400277777777773,0.29651944444444434
28
- Qwen2.5-32B-Instruct,0.7263071895424837,0.656917654644944,0.6724190751477237,0.1806656189868978,0.5603222222222223,0.40237500000000004,0.41161666666666663
29
- Qwen2.5-72B-Instruct,0.8149509803921569,0.7104489147495714,0.6974116787371809,0.16176650806326276,0.6734583333333333,0.2993,0.3184472222222223
30
- gpt-3.5-turbo-0125,0.23856209150326796,0.28218378886707396,0.08240359836763214,0.28728574920060357,0.3873055555555555,0.599925,0.572238888888889
31
- gpt-4o-0513,0.6813725490196078,0.5989532974661671,0.5122163952167618,0.19201420113771173,0.6235416666666667,0.34458611111111115,0.3441805555555555
32
- gpt-4o-mini-2024-07-18,0.36519607843137253,0.3418785071827972,0.13575309046266867,0.2707065266105181,0.44214722222222214,0.5004583333333332,0.47896666666666665
33
- Mistral-Large-Instruct-2407,0.8370098039215687,0.7374229691535793,0.7644582301049158,0.16944638941325085,0.6510750000000001,0.31028611111111104,0.3297916666666667
34
- Mistral-Nemo-Instruct-2407,0.5759803921568627,0.5262426956484347,0.4414072595011627,0.21142636170606344,0.5161,0.42923055555555545,0.43113055555555546
35
- Mistral-Small-Instruct-2409,0.766748366013072,0.6890378862258165,0.6416815833333804,0.1894343546381,0.6840472222222221,0.2601583333333335,0.2888777777777778
36
- dummy,0.1830065359477124,0.2291015386716794,-0.009004148398032956,0.2928877637010999,0.3755222222222222,0.622275,0.5915305555555557
 
 
 
 
 
 
1
  Model,Ordinal (Win rate),Cardinal (Score),RO Stability,Stress,CFI,SRMR,RMSEA
2
+ Ministrations-8B-v1,0.5954415954415954,0.5629212654972728,0.5060368869776407,0.22452742393237857,0.5590166666666667,0.42490000000000006,0.4303666666666667
3
+ Cydonia-22B-v1.2,0.7069088319088319,0.6554547368791501,0.6193557112491432,0.19415994996337282,0.6361166666666667,0.3338083333333334,0.3419333333333332
4
+ Nautilus-70B-v0.1,0.7834757834757835,0.7074335111695115,0.6326443266767067,0.1814758840174009,0.7506972222222221,0.20927777777777778,0.23105277777777777
5
+ Ministral-8B-Instruct-2410,0.5473646723646723,0.5196862499633303,0.4120903823316509,0.24045486295329566,0.5791333333333334,0.4199666666666668,0.4095833333333334
6
+ llama-3.1-nemotron-70B-instruct,0.8714387464387464,0.751782963334874,0.7174031652092134,0.16209339860230643,0.7561694444444446,0.21189444444444439,0.23753055555555547
7
+ hermes_3_llama_3.1_70b,0.5352564102564101,0.47953495168653426,0.2589918470169178,0.22941277457539844,0.6493000000000001,0.3096972222222222,0.3098777777777778
8
+ hermes_3_llama_3.1_8b,0.4223646723646724,0.4117221943281449,0.16520527634373441,0.25311021741644446,0.5822944444444444,0.3526722222222223,0.3435555555555555
9
+ gemma-2-2b-it,0.33012820512820507,0.3309858600428668,0.14746606707946294,0.263080165752695,0.40932500000000005,0.550475,0.5377472222222222
10
+ gemma-2-9b-it,0.707977207977208,0.6020857503693501,0.43782539244147833,0.20116278903333318,0.7543666666666667,0.23989444444444452,0.24792499999999995
11
+ gemma-2-27b-it,0.5975783475783476,0.5270946699366518,0.3917304045417486,0.2058170364515589,0.5997861111111111,0.37121111111111116,0.37292222222222227
12
+ phi-3-mini-128k-instruct,0.30128205128205127,0.32984992817164005,0.039299993295009855,0.281800547806919,0.5861361111111111,0.42524166666666674,0.3974944444444444
13
+ phi-3-medium-128k-instruct,0.2977207977207977,0.30802986933853177,0.09692037989916814,0.2651981204439735,0.43025555555555556,0.5503277777777777,0.5381722222222222
14
+ phi-3.5-mini-instruct,0.21972934472934474,0.2680653144619754,0.0361229186530762,0.28422749224983457,0.40715555555555555,0.5721138888888888,0.5507833333333333
15
+ phi-3.5-MoE-instruct,0.35826210826210825,0.36128192067041315,0.10985291697837646,0.2739229692168671,0.5530944444444444,0.4248777777777778,0.40345
16
+ Mistral-7B-Instruct-v0.1,0.198005698005698,0.26609566354811315,0.027216280472015988,0.2829498135031582,0.38917777777777773,0.5561138888888888,0.530213888888889
17
+ Mistral-7B-Instruct-v0.2,0.3148148148148148,0.32133832899241477,0.14417876497818388,0.265188983528973,0.3802722222222222,0.5727305555555555,0.5483611111111111
18
+ Mistral-7B-Instruct-v0.3,0.23361823361823364,0.26572479479146804,0.07960539866974455,0.2742399030139009,0.31385,0.6241,0.6081333333333333
19
+ Mixtral-8x7B-Instruct-v0.1,0.3970797720797721,0.3819009850972602,0.21473356319081474,0.2624402608740656,0.45275,0.5034666666666667,0.4905694444444444
20
+ Mixtral-8x22B-Instruct-v0.1,0.3108974358974359,0.31529864972153404,0.1414001940345544,0.2548838005881672,0.3772361111111111,0.5810888888888889,0.5844750000000001
21
+ command_r_plus,0.5491452991452992,0.4995356672762356,0.3429686514651868,0.23811982320641845,0.6033000000000001,0.3740166666666668,0.3667527777777777
22
+ llama_3_8b_instruct,0.4565527065527065,0.4295836112681494,0.24527785038654715,0.245806400289881,0.5498222222222222,0.42656388888888896,0.42189444444444446
23
+ llama_3_70b_instruct,0.7589031339031339,0.6839540364836003,0.607020698814379,0.18525883672204868,0.7210055555555557,0.2346083333333333,0.25758888888888887
24
+ llama_3.1_8b_instruct,0.5373931623931624,0.4786874422110324,0.4295080949846363,0.22060228669473025,0.4305722222222223,0.5455027777777777,0.553
25
+ llama_3.1_70b_instruct,0.805911680911681,0.7172545013390067,0.691365862744007,0.1709718847084183,0.6979472222222223,0.2636777777777777,0.2907250000000001
26
+ llama_3.1_405b_instruct_4bit,0.7272079772079771,0.6490864350383405,0.7232098126552619,0.1702199925365422,0.4875722222222223,0.4963444444444445,0.5211555555555556
27
+ llama_3.2_1b_instruct,0.19729344729344728,0.2522036562381785,0.027192115495770382,0.29255310096654275,0.37450000000000006,0.5990222222222223,0.5740638888888888
28
+ llama_3.2_3b_instruct,0.358974358974359,0.3615804465210719,0.13450325180647235,0.27485276839064654,0.5017,0.44956666666666667,0.4226500000000001
29
+ Qwen2-7B-Instruct,0.37037037037037035,0.36370005127542027,0.25108519506513916,0.25776537005719313,0.3560861111111111,0.6009722222222222,0.5920888888888889
30
+ Qwen2-72B-Instruct,0.5641025641025641,0.5461212335522644,0.6465993243020925,0.20297742879025626,0.3045,0.6543138888888889,0.6646361111111111
31
+ Qwen2.5-0.5B-Instruct,0.2706552706552707,0.3005554090516966,0.002970456550606876,0.2928913315666324,0.5371250000000001,0.44709722222222226,0.404575
32
+ Qwen2.5-7B-Instruct,0.5680199430199431,0.5163098181421168,0.333554494486959,0.2505866550331236,0.6473694444444444,0.30400277777777773,0.29651944444444434
33
+ Qwen2.5-32B-Instruct,0.7272079772079771,0.656917654644944,0.6724190751477237,0.1806656189868978,0.5603222222222223,0.40237500000000004,0.41161666666666663
34
+ Qwen2.5-72B-Instruct,0.8108974358974359,0.7104489147495714,0.6974116787371809,0.16176650806326276,0.6734583333333333,0.2993,0.3184472222222223
35
+ gpt-3.5-turbo-0125,0.21723646723646722,0.28218378886707396,0.08240359836763214,0.28728574920060357,0.3873055555555555,0.599925,0.572238888888889
36
+ gpt-4o-0513,0.6688034188034188,0.5989532974661671,0.5122163952167618,0.19201420113771173,0.6235416666666667,0.34458611111111115,0.3441805555555555
37
+ gpt-4o-mini-2024-07-18,0.33547008547008544,0.3418785071827972,0.13575309046266867,0.2707065266105181,0.44214722222222214,0.5004583333333332,0.47896666666666665
38
+ Mistral-Large-Instruct-2407,0.826923076923077,0.7374229691535793,0.7644582301049158,0.16944638941325085,0.6510750000000001,0.31028611111111104,0.3297916666666667
39
+ Mistral-Nemo-Instruct-2407,0.5487891737891738,0.5262426956484347,0.4414072595011627,0.21142636170606344,0.5161,0.42923055555555545,0.43113055555555546
40
+ Mistral-Small-Instruct-2409,0.75997150997151,0.6890378862258165,0.6416815833333804,0.1894343546381,0.6840472222222221,0.2601583333333335,0.2888777777777778
41
+ dummy,0.1727207977207977,0.2291015386716794,-0.009004148398032956,0.2928877637010999,0.3755222222222222,0.622275,0.5915305555555557
static/models_data/Cydonia-22B-v1.2/cfa_metrics.csv ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ Context chunk,CFI,TLI,SRMR,RMSEA
2
+ chunk_0,0.8068,0.739625,0.1121,0.14629999999999999
3
+ chunk_1,0.6067499999999999,0.563925,0.345375,0.35409999999999997
4
+ chunk_2,0.403675,0.376425,0.56435,0.55855
5
+ chunk_3,0.6270249999999999,0.5892,0.32565,0.34327500000000005
6
+ chunk_4,0.8815999999999999,0.840025,0.10122500000000001,0.117225
7
+ chunk_chess_0,0.6381,0.604975,0.34,0.35275
8
+ chunk_grammar_1,0.865825,0.8127499999999999,0.104025,0.11407499999999998
9
+ chunk_no_conv,0.697075,0.6856500000000001,0.321125,0.30562500000000004
10
+ chunk_svs_no_conv,0.1982,0.180525,0.7904249999999999,0.7855
static/models_data/Cydonia-22B-v1.2/matrix.svg ADDED
static/models_data/Cydonia-22B-v1.2/model_detail.html ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ <p>
2
+ This open-source model was created by <a target="_blank" href="https://huggingface.co/BeaverAI">TheDrummer (BeaverAI)</a>.
3
+ The 22B model is available on the huggingface hub: <a target="_blank" href="https://huggingface.co/TheDrummer/Cydonia-22B-v1.2">https://huggingface.co/TheDrummer/Cydonia-22B-v1.2</a>.
4
+ </p>
static/models_data/Cydonia-22B-v1.2/ranks.svg ADDED
static/models_data/Cydonia-22B-v1.2/structure.svg ADDED
static/models_data/Ministral-8B-Instruct-2410/cfa_metrics.csv ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ Context chunk,CFI,TLI,SRMR,RMSEA
2
+ chunk_0,0.813025,0.74795,0.104325,0.11485
3
+ chunk_1,0.4709,0.456125,0.54285,0.53905
4
+ chunk_2,0.950475,1.07045,0.085475,0.037775
5
+ chunk_3,0.902625,0.911075,0.08812500000000001,0.078075
6
+ chunk_4,0.666,0.636125,0.32995,0.3293
7
+ chunk_chess_0,0.46555,0.44532499999999997,0.539875,0.54855
8
+ chunk_grammar_1,0.218225,0.20795,0.776475,0.758975
9
+ chunk_no_conv,0.4754,0.47455,0.5455749999999999,0.529675
10
+ chunk_svs_no_conv,0.25,0.25045,0.76705,0.75
static/models_data/Ministral-8B-Instruct-2410/matrix.svg ADDED
static/models_data/Ministral-8B-Instruct-2410/model_detail.html ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ <p>
2
+ This open-source model was created by <a target="_blank" href="https://mistral.ai/">Mistral AI</a>.
3
+ You can find the release blog post <a target="_blank" href="https://mistral.ai/news/ministraux/">here</a>.
4
+ The model is available on the huggingface hub: <a target="_blank" href="https://huggingface.co/mistralai/Ministral-8B-Instruct-2410">https://huggingface.co/mistralai/Ministral-8B-Instruct-2410</a>.
5
+ The model has 8B parameters, and supports up to 128K token contexts.
6
+ </p>
static/models_data/Ministral-8B-Instruct-2410/ranks.svg ADDED
static/models_data/Ministral-8B-Instruct-2410/structure.svg ADDED
static/models_data/Ministrations-8B-v1/cfa_metrics.csv ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ Context chunk,CFI,TLI,SRMR,RMSEA
2
+ chunk_0,0.6514249999999999,0.6182000000000001,0.339675,0.3367
3
+ chunk_1,0.48062499999999997,0.5191,0.5442750000000001,0.5115000000000001
4
+ chunk_2,0.418025,0.412975,0.5533750000000001,0.5383249999999999
5
+ chunk_3,0.44335,0.41459999999999997,0.5395,0.56965
6
+ chunk_4,0.8795,0.8342,0.10127499999999999,0.12219999999999999
7
+ chunk_chess_0,0.690575,0.667175,0.3052,0.33315
8
+ chunk_grammar_1,0.419425,0.394525,0.556825,0.550925
9
+ chunk_no_conv,0.4447,0.42155,0.552075,0.559525
10
+ chunk_svs_no_conv,0.603525,0.567325,0.3319,0.35132500000000005
static/models_data/Ministrations-8B-v1/matrix.svg ADDED
static/models_data/Ministrations-8B-v1/model_detail.html ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ <p>
2
+ This open-source model was created by <a target="_blank" href="https://huggingface.co/BeaverAI">TheDrummer (BeaverAI)</a>.
3
+ The model is available on the huggingface hub: <a target="_blank" href="https://huggingface.co/TheDrummer/Ministrations-8B-v1">https://huggingface.co/TheDrummer/Ministrations-8B-v1</a>.
4
+ It is based on <a target="_blank" href="https://huggingface.co/mistralai/Ministral-8B-Instruct-2410">Ministral</a>.
5
+ The model has 8B parameters, and supports up to 128K token contexts.
6
+ </p>
static/models_data/Ministrations-8B-v1/ranks.svg ADDED
static/models_data/Ministrations-8B-v1/structure.svg ADDED
static/models_data/Nautilus-70B-v0.1/cfa_metrics.csv ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ Context chunk,CFI,TLI,SRMR,RMSEA
2
+ chunk_0,0.628875,0.591275,0.3249,0.32442499999999996
3
+ chunk_1,0.822175,0.745225,0.11009999999999999,0.13985
4
+ chunk_2,0.897,0.86425,0.103925,0.1001
5
+ chunk_3,0.6444,0.6114499999999999,0.329275,0.327175
6
+ chunk_4,0.83545,0.772575,0.102525,0.154
7
+ chunk_chess_0,0.599925,0.5493,0.341425,0.37892500000000007
8
+ chunk_grammar_1,0.8415250000000001,0.787325,0.108175,0.14775
9
+ chunk_no_conv,0.833025,0.76905,0.1275,0.1716
10
+ chunk_svs_no_conv,0.6538999999999999,0.627275,0.335675,0.33564999999999995
static/models_data/Nautilus-70B-v0.1/matrix.svg ADDED
static/models_data/Nautilus-70B-v0.1/model_detail.html ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ <p>
2
+ This open-source model was created by <a target="_blank" href="https://huggingface.co/BeaverAI">TheDrummer (BeaverAI)</a>.
3
+ The model is available on the huggingface hub: <a target="_blank" href="https://huggingface.co/TheDrummer/Nautilus-70B-v0.1">https://huggingface.co/TheDrummer/Nautilus-70B-v0.1</a>.
4
+ It is based on <a target="_blank" href="https://huggingface.co/nvidia/Llama-3.1-Nemotron-70B-Instruct">Nemotron</a>.
5
+ The model has 70B parameters, and supports up to 128K token contexts.
6
+ </p>
static/models_data/Nautilus-70B-v0.1/ranks.svg ADDED
static/models_data/Nautilus-70B-v0.1/structure.svg ADDED
static/models_data/cardinal.svg CHANGED
static/models_data/hermes_3_llama_3.1_70b_instruct/cfa_metrics.csv ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ Context chunk,CFI,TLI,SRMR,RMSEA
2
+ chunk_0,0.636425,0.69835,0.326575,0.30785
3
+ chunk_1,0.824375,0.7826500000000001,0.10675,0.058825
4
+ chunk_2,0.7808250000000001,0.697175,0.101125,0.090375
5
+ chunk_3,0.657275,0.6215250000000001,0.316625,0.3185
6
+ chunk_4,0.445575,0.425375,0.54375,0.5425
7
+ chunk_chess_0,0.9093,0.8766750000000001,0.085125,0.10762500000000001
8
+ chunk_grammar_1,0.184675,0.1678,0.7832749999999999,0.776275
9
+ chunk_no_conv,0.8501000000000001,0.797825,0.09914999999999999,0.15445
10
+ chunk_svs_no_conv,0.55515,0.5024,0.4249,0.4325
static/models_data/hermes_3_llama_3.1_70b_instruct/matrix.svg ADDED
static/models_data/hermes_3_llama_3.1_70b_instruct/ranks.svg ADDED
static/models_data/hermes_3_llama_3.1_70b_instruct/structure.svg ADDED
static/models_data/ordinal.svg CHANGED