 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
    "epoch": 15.0,
    "eval_AerospaceManufacturer_f1": 0.7682119205298014,
    "eval_AerospaceManufacturer_number": 138,
    "eval_AerospaceManufacturer_precision": 0.7073170731707317,
    "eval_AerospaceManufacturer_recall": 0.8405797101449275,
    "eval_AnatomicalStructure_f1": 0.7006369426751592,
    "eval_AnatomicalStructure_number": 227,
    "eval_AnatomicalStructure_precision": 0.6762295081967213,
    "eval_AnatomicalStructure_recall": 0.7268722466960352,
    "eval_ArtWork_f1": 0.5801526717557252,
    "eval_ArtWork_number": 131,
    "eval_ArtWork_precision": 0.5801526717557252,
    "eval_ArtWork_recall": 0.5801526717557252,
    "eval_Artist_f1": 0.7747237177670728,
    "eval_Artist_number": 1722,
    "eval_Artist_precision": 0.7565024903154399,
    "eval_Artist_recall": 0.7938443670150988,
    "eval_Athlete_f1": 0.7408906882591093,
    "eval_Athlete_number": 719,
    "eval_Athlete_precision": 0.7195281782437746,
    "eval_Athlete_recall": 0.7635605006954103,
    "eval_CarManufacturer_f1": 0.742857142857143,
    "eval_CarManufacturer_number": 159,
    "eval_CarManufacturer_precision": 0.680628272251309,
    "eval_CarManufacturer_recall": 0.8176100628930818,
    "eval_Cleric_f1": 0.5868945868945868,
    "eval_Cleric_number": 201,
    "eval_Cleric_precision": 0.6866666666666666,
    "eval_Cleric_recall": 0.5124378109452736,
    "eval_Clothing_f1": 0.6015037593984963,
    "eval_Clothing_number": 128,
    "eval_Clothing_precision": 0.5797101449275363,
    "eval_Clothing_recall": 0.625,
    "eval_Disease_f1": 0.6504854368932038,
    "eval_Disease_number": 198,
    "eval_Disease_precision": 0.6261682242990654,
    "eval_Disease_recall": 0.6767676767676768,
    "eval_Drink_f1": 0.7682119205298013,
    "eval_Drink_number": 143,
    "eval_Drink_precision": 0.7295597484276729,
    "eval_Drink_recall": 0.8111888111888111,
    "eval_Facility_f1": 0.6799620132953467,
    "eval_Facility_number": 497,
    "eval_Facility_precision": 0.6438848920863309,
    "eval_Facility_recall": 0.7203219315895373,
    "eval_Food_f1": 0.5968586387434555,
    "eval_Food_number": 214,
    "eval_Food_precision": 0.6785714285714286,
    "eval_Food_recall": 0.5327102803738317,
    "eval_HumanSettlement_f1": 0.8691834942932396,
    "eval_HumanSettlement_number": 1689,
    "eval_HumanSettlement_precision": 0.859375,
    "eval_HumanSettlement_recall": 0.8792184724689165,
    "eval_MedicalProcedure_f1": 0.7035830618892507,
    "eval_MedicalProcedure_number": 142,
    "eval_MedicalProcedure_precision": 0.6545454545454545,
    "eval_MedicalProcedure_recall": 0.7605633802816901,
    "eval_Medication/Vaccine_f1": 0.7409200968523003,
    "eval_Medication/Vaccine_number": 200,
    "eval_Medication/Vaccine_precision": 0.7183098591549296,
    "eval_Medication/Vaccine_recall": 0.765,
    "eval_MusicalGRP_f1": 0.7399741267787839,
    "eval_MusicalGRP_number": 372,
    "eval_MusicalGRP_precision": 0.713216957605985,
    "eval_MusicalGRP_recall": 0.7688172043010753,
    "eval_MusicalWork_f1": 0.7275031685678074,
    "eval_MusicalWork_number": 407,
    "eval_MusicalWork_precision": 0.7513089005235603,
    "eval_MusicalWork_recall": 0.7051597051597052,
    "eval_ORG_f1": 0.6224256292906178,
    "eval_ORG_number": 667,
    "eval_ORG_precision": 0.6335403726708074,
    "eval_ORG_recall": 0.6116941529235382,
    "eval_OtherLOC_f1": 0.6797066014669927,
    "eval_OtherLOC_number": 224,
    "eval_OtherLOC_precision": 0.7513513513513513,
    "eval_OtherLOC_recall": 0.6205357142857143,
    "eval_OtherPER_f1": 0.5112474437627812,
    "eval_OtherPER_number": 859,
    "eval_OtherPER_precision": 0.45578851412944393,
    "eval_OtherPER_recall": 0.5820721769499418,
    "eval_OtherPROD_f1": 0.5797101449275364,
    "eval_OtherPROD_number": 433,
    "eval_OtherPROD_precision": 0.6075949367088608,
    "eval_OtherPROD_recall": 0.5542725173210161,
    "eval_Politician_f1": 0.5417057169634489,
    "eval_Politician_number": 603,
    "eval_Politician_precision": 0.6228448275862069,
    "eval_Politician_recall": 0.4792703150912106,
    "eval_PrivateCorp_f1": 0.5806451612903226,
    "eval_PrivateCorp_number": 129,
    "eval_PrivateCorp_precision": 0.7159090909090909,
    "eval_PrivateCorp_recall": 0.4883720930232558,
    "eval_PublicCorp_f1": 0.6187845303867403,
    "eval_PublicCorp_number": 243,
    "eval_PublicCorp_precision": 0.56,
    "eval_PublicCorp_recall": 0.691358024691358,
    "eval_Scientist_f1": 0.45212765957446804,
    "eval_Scientist_number": 189,
    "eval_Scientist_precision": 0.45454545454545453,
    "eval_Scientist_recall": 0.4497354497354497,
    "eval_Software_f1": 0.7576687116564418,
    "eval_Software_number": 307,
    "eval_Software_precision": 0.7159420289855073,
    "eval_Software_recall": 0.8045602605863192,
    "eval_SportsGRP_f1": 0.8251231527093595,
    "eval_SportsGRP_number": 385,
    "eval_SportsGRP_precision": 0.7845433255269321,
    "eval_SportsGRP_recall": 0.8701298701298701,
    "eval_SportsManager_f1": 0.5942857142857142,
    "eval_SportsManager_number": 194,
    "eval_SportsManager_precision": 0.6666666666666666,
    "eval_SportsManager_recall": 0.5360824742268041,
    "eval_Station_f1": 0.7733990147783251,
    "eval_Station_number": 194,
    "eval_Station_precision": 0.7405660377358491,
    "eval_Station_recall": 0.8092783505154639,
    "eval_Symptom_f1": 0.5925925925925927,
    "eval_Symptom_number": 129,
    "eval_Symptom_precision": 0.631578947368421,
    "eval_Symptom_recall": 0.5581395348837209,
    "eval_Vehicle_f1": 0.5968819599109132,
    "eval_Vehicle_number": 206,
    "eval_Vehicle_precision": 0.551440329218107,
    "eval_Vehicle_recall": 0.6504854368932039,
    "eval_VisualWork_f1": 0.7738764044943821,
    "eval_VisualWork_number": 693,
    "eval_VisualWork_precision": 0.7537619699042407,
    "eval_VisualWork_recall": 0.7950937950937951,
    "eval_WrittenWork_f1": 0.6857654431512982,
    "eval_WrittenWork_number": 563,
    "eval_WrittenWork_precision": 0.6913357400722022,
    "eval_WrittenWork_recall": 0.6802841918294849,
    "eval_loss": 0.21482913196086884,
    "eval_overall_accuracy": 0.9355327807299229,
    "eval_overall_f1": 0.7033048369786461,
    "eval_overall_precision": 0.6927675707203266,
    "eval_overall_recall": 0.7141676061630966,
    "eval_runtime": 44.2333,
    "eval_samples": 8895,
    "eval_samples_per_second": 201.093,
    "eval_steps_per_second": 25.139
}