DunnBC22 commited on
Commit
079fc18
1 Parent(s): 41b48c6

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +61 -53
README.md CHANGED
@@ -36,54 +36,68 @@ pipeline_tag: token-classification
36
  This model is a fine-tuned version of [bert-base-cased](https://huggingface.co/bert-base-cased) on the twitter_pos_vcb dataset.
37
  It achieves the following results on the evaluation set:
38
  - Loss: 0.0502
39
- - $: {'precision': 0.0, 'recall': 0.0, 'f1-score': 0.0, 'support': 3}
40
- - '': {'precision': 0.9312320916905444, 'recall': 0.9530791788856305, 'f1-score': 0.9420289855072465, 'support': 341}
41
- - (: {'precision': 0.9791666666666666, 'recall': 0.9591836734693877, 'f1-score': 0.9690721649484536, 'support': 196}
42
- - ): {'precision': 0.960167714884696, 'recall': 0.9703389830508474, 'f1-score': 0.9652265542676501, 'support': 472}
43
- - ,: {'precision': 0.9988979501873485, 'recall': 0.9993384785005512, 'f1-score': 0.9991181657848325, 'support': 4535}
44
- - .: {'precision': 0.9839189708141322, 'recall': 0.9894762249577601, 'f1-score': 0.9866897730281368, 'support': 20715}
45
- - :: {'precision': 0.9926405887528997, 'recall': 0.9971072719967858, 'f1-score': 0.9948689168604183, 'support': 12445}
46
- - Cc: {'precision': 0.9991067440821796, 'recall': 0.9986607142857142, 'f1-score': 0.9988836793927215, 'support': 4480}
47
- - Cd: {'precision': 0.9903884661593912, 'recall': 0.9899919935948759, 'f1-score': 0.9901901901901902, 'support': 2498}
48
- - Dt: {'precision': 0.9981148589510537, 'recall': 0.9976446837146703, 'f1-score': 0.9978797159492478, 'support': 14860}
49
- - Ex: {'precision': 0.9142857142857143, 'recall': 0.9846153846153847, 'f1-score': 0.9481481481481482, 'support': 65}
50
- - Fw: {'precision': 1.0, 'recall': 0.1, 'f1-score': 0.18181818181818182, 'support': 10}
51
- - Ht: {'precision': 0.999877541023757, 'recall': 0.9997551120362435, 'f1-score': 0.9998163227820978, 'support': 8167}
52
- - In: {'precision': 0.9960399353003514, 'recall': 0.9954846981437092, 'f1-score': 0.9957622393219583, 'support': 17939}
53
- - Jj: {'precision': 0.9812470698546648, 'recall': 0.9834756049808129, 'f1-score': 0.9823600735322877, 'support': 12769}
54
- - Jjr: {'precision': 0.9304511278195489, 'recall': 0.9686888454011742, 'f1-score': 0.9491850431447747, 'support': 511}
55
- - Jjs: {'precision': 0.9578414839797639, 'recall': 0.9726027397260274, 'f1-score': 0.9651656754460493, 'support': 584}
56
- - Md: {'precision': 0.9901398761751892, 'recall': 0.9908214777420835, 'f1-score': 0.990480559697213, 'support': 4358}
57
- - Nn: {'precision': 0.9810285563194078, 'recall': 0.9819697621331922, 'f1-score': 0.9814989335846437, 'support': 30227}
58
- - Nnp: {'precision': 0.9609722697706266, 'recall': 0.9467116357504216, 'f1-score': 0.9537886510363575, 'support': 8895}
59
- - Nnps: {'precision': 1.0, 'recall': 0.037037037037037035, 'f1-score': 0.07142857142857142, 'support': 27}
60
- - Nns: {'precision': 0.9697771061579146, 'recall': 0.9776564681985528, 'f1-score': 0.9737008471361739, 'support': 7877}
61
- - Pos: {'precision': 0.9977272727272727, 'recall': 0.984304932735426, 'f1-score': 0.9909706546275394, 'support': 446}
62
- - Prp: {'precision': 0.9983503349829983, 'recall': 0.9985184187487373, 'f1-score': 0.9984343697917544, 'support': 29698}
63
- - Prp$: {'precision': 0.9974262182566919, 'recall': 0.9974262182566919, 'f1-score': 0.9974262182566919, 'support': 5828}
64
- - Rb: {'precision': 0.9939770374552983, 'recall': 0.9929802569727358, 'f1-score': 0.9934783971906942, 'support': 15955}
65
- - Rbr: {'precision': 0.9058823529411765, 'recall': 0.8191489361702128, 'f1-score': 0.8603351955307263, 'support': 94}
66
- - Rbs: {'precision': 0.92, 'recall': 1.0, 'f1-score': 0.9583333333333334, 'support': 69}
67
- - Rp: {'precision': 0.9802197802197802, 'recall': 0.9903774981495189, 'f1-score': 0.9852724594992636, 'support': 1351}
68
- - Rt: {'precision': 0.9995065383666419, 'recall': 0.9996298581122763, 'f1-score': 0.9995681944358769, 'support': 8105}
69
- - Sym: {'precision': 0.0, 'recall': 0.0, 'f1-score': 0.0, 'support': 9}
70
- - To: {'precision': 0.9984649496844619, 'recall': 0.9989761092150171, 'f1-score': 0.9987204640450398, 'support': 5860}
71
- - Uh: {'precision': 0.9614460148062687, 'recall': 0.9507510933637574, 'f1-score': 0.9560686457287633, 'support': 10518}
72
- - Url: {'precision': 1.0, 'recall': 0.9997242900468707, 'f1-score': 0.9998621260168207, 'support': 3627}
73
- - Usr: {'precision': 0.9999025388626285, 'recall': 1.0, 'f1-score': 0.9999512670565303, 'support': 20519}
74
- - Vb: {'precision': 0.9619302598929085, 'recall': 0.9570556133056133, 'f1-score': 0.9594867452615125, 'support': 15392}
75
- - Vbd: {'precision': 0.9592894152479645, 'recall': 0.9548719837907533, 'f1-score': 0.9570756023262255, 'support': 5429}
76
- - Vbg: {'precision': 0.9848831077518018, 'recall': 0.984191111891797, 'f1-score': 0.9845369882270251, 'support': 5693}
77
- - Vbn: {'precision': 0.9053408597481546, 'recall': 0.9164835164835164, 'f1-score': 0.910878112712975, 'support': 2275}
78
- - Vbp: {'precision': 0.963605718209626, 'recall': 0.9666228317364894, 'f1-score': 0.9651119169688633, 'support': 15969}
79
- - Vbz: {'precision': 0.9881780250347705, 'recall': 0.9861207494795281, 'f1-score': 0.9871483153872872, 'support': 5764}
80
- - Wdt: {'precision': 0.8666666666666667, 'recall': 0.9285714285714286, 'f1-score': 0.896551724137931, 'support': 14}
81
- - Wp: {'precision': 0.99125, 'recall': 0.993734335839599, 'f1-score': 0.9924906132665832, 'support': 1596}
82
- - Wrb: {'precision': 0.9963488843813387, 'recall': 0.9979683055668428, 'f1-score': 0.9971579374746244, 'support': 2461}
83
- - ``: {'precision': 0.9481865284974094, 'recall': 0.9786096256684492, 'f1-score': 0.963157894736842, 'support': 187}
 
 
 
 
 
 
84
  - Accuracy: 0.9853
85
- - Macro avg: {'precision': 0.9296417163691048, 'recall': 0.8931046018294694, 'f1-score': 0.8930917459781836, 'support': 308833}
86
- - Weighted avg: {'precision': 0.985306457604231, 'recall': 0.9853480683735223, 'f1-score': 0.9852689858931941, 'support': 308833}
 
 
 
 
 
 
 
 
87
 
88
  ## Model description
89
 
@@ -112,12 +126,6 @@ The following hyperparameters were used during training:
112
 
113
  ### Training results
114
 
115
- | Training Loss | Epoch | Step | Validation Loss | $ | '' | ( | ) | , | . | : | Cc | Cd | Dt | Ex | Fw | Ht | In | Jj | Jjr | Jjs | Md | Nn | Nnp | Nnps | Nns | Pos | Prp | Prp$ | Rb | Rbr | Rbs | Rp | Rt | Sym | To | Uh | Url | Usr | Vb | Vbd | Vbg | Vbn | Vbp | Vbz | Wdt | Wp | Wrb | `` | Accuracy | Macro avg | Weighted avg |
116
- |:-------------:|:-----:|:-----:|:---------------:|:----------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------:|:-------------------------------------------------------------------------
---------------------------------------:|:-----------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------:|:-------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------
-:|:----------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:-------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------:|:--------:|:------------------------------------------------------------------------------------------------------------------:|:------------------------------------------------------------------------------------------------------------------:|
117
- | 0.0649 | 1.0 | 7477 | 0.0570 | {'precision': 0.0, 'recall': 0.0, 'f1-score': 0.0, 'support': 3} | {'precision': 0.9257142857142857, 'recall': 0.9501466275659824, 'f1-score': 0.9377713458755427, 'support': 341} | {'precision': 0.9690721649484536, 'recall': 0.9591836734693877, 'f1-score': 0.964102564102564, 'support': 196} | {'precision': 0.9427402862985685, 'recall': 0.9766949152542372, 'f1-score': 0.959417273673257, 'support': 472} | {'precision': 0.998677831643896, 'recall': 0.9993384785005512, 'f1-score': 0.9990080458503251, 'support': 4535} | {'precision': 0.977118886452474, 'recall': 0.9895244991552016, 'f1-score': 0.9832825654186554, 'support': 20715} | {'precision': 0.9933418899406385, 'recall': 0.9950180795500201, 'f1-score': 0.9941792782305006, 'support': 12445} | {'precision': 0.9995530726256984, 'recall': 0.9984375, 'f1-score': 0.9989949748743719, 'support': 4480} | {'precision': 0.9856573705179282, 'recall': 0.9903923138510808, 'f1-score': 0.9880191693290733, 'support': 2498} | {'precision': 0.9974431435876733, 'recall': 0.9975773889636609, 'f1-score': 0.9975102617589665, 'support': 14860} | {'precision': 0.9846153846153847, 'recall': 0.9846153846153847, 'f1-score': 0.9846153846153847, 'support': 65} | {'precision': 0.0, 'recall': 0.0, 'f1-score': 0.0, 'support': 10} | {'precision': 0.9993879299791896, 'recall': 0.9996326680543651, 'f1-score': 0.9995102840352595, 'support': 8167} | {'precision': 0.996751246289139, 'recall': 0.9919727966999276, 'f1-score': 0.9943562807331248, 'support': 17939} | {'precision': 0.9833821318350875, 'recall': 0.9732163834286162, 'f1-score': 0.9782728489333229, 'support': 12769} | {'precision': 0.9160447761194029, 'recall': 0.9608610567514677, 'f1-score': 0.9379178605539636, 'support': 511} | {'precision': 0.9656357388316151, 'recall': 0.9623287671232876, 'f1-score': 0.9639794168096055, 'support': 584} | {'precision': 0.9894398530762167, 'recall': 0.9889857732905002, 'f1-score': 0.9892127610741337, 'support': 4358} | 
{'precision': 0.9773657120232282, 'recall': 0.9799847818175803, 'f1-score': 0.9786734946725035, 'support': 30227} | {'precision': 0.9524512029051294, 'recall': 0.9435637998875773, 'f1-score': 0.9479866719376517, 'support': 8895} | {'precision': 0.0, 'recall': 0.0, 'f1-score': 0.0, 'support': 27} | {'precision': 0.9615001245950661, 'recall': 0.9796876983623207, 'f1-score': 0.9705087090486072, 'support': 7877} | {'precision': 1.0, 'recall': 0.984304932735426, 'f1-score': 0.9920903954802259, 'support': 446} | {'precision': 0.9984506567867969, 'recall': 0.9981816957370867, 'f1-score': 0.9983161581464269, 'support': 29698} | {'precision': 0.9967404357522731, 'recall': 0.9969114619080301, 'f1-score': 0.996825941494381, 'support': 5828} | {'precision': 0.9929754139488208, 'recall': 0.9922908179254152, 'f1-score': 0.9926329978996206, 'support': 15955} | {'precision': 0.9102564102564102, 'recall': 0.7553191489361702, 'f1-score': 0.8255813953488371, 'support': 94} | {'precision': 0.9324324324324325, 'recall': 1.0, 'f1-score': 0.965034965034965, 'support': 69} | {'precision': 0.9496810772501771, 'recall': 0.9918578830495929, 'f1-score': 0.9703113685734974, 'support': 1351} | {'precision': 0.9997531778353697, 'recall': 0.9995064774830351, 'f1-score': 0.9996298124383021, 'support': 8105} | {'precision': 0.0, 'recall': 0.0, 'f1-score': 0.0, 'support': 9} | {'precision': 0.9976145851081956, 'recall': 0.9991467576791809, 'f1-score': 0.9983800835535852, 'support': 5860} | {'precision': 0.9536072530864198, 'recall': 0.9400076060087469, 'f1-score': 0.9467585942736763, 'support': 10518} | {'precision': 1.0, 'recall': 0.9997242900468707, 'f1-score': 0.9998621260168207, 'support': 3627} | {'precision': 0.9999025388626285, 'recall': 1.0, 'f1-score': 0.9999512670565303, 'support': 20519} | {'precision': 0.9477061276868785, 'recall': 0.9595893970893971, 'f1-score': 0.9536107434548213, 'support': 15392} | {'precision': 0.9504877599852751, 'recall': 0.9511880641002026, 'f1-score': 
0.9508377830970356, 'support': 5429} | {'precision': 0.9805672268907563, 'recall': 0.9838398032671702, 'f1-score': 0.9822007891275757, 'support': 5693} | {'precision': 0.9150772025431426, 'recall': 0.8857142857142857, 'f1-score': 0.9001563547018092, 'support': 2275} | {'precision': 0.9659941631772617, 'recall': 0.953472352683324, 'f1-score': 0.9596924143581986, 'support': 15969} | {'precision': 0.9898548189609935, 'recall': 0.9817834836918806, 'f1-score': 0.9858026304328891, 'support': 5764} | {'precision': 0.8666666666666667, 'recall': 0.9285714285714286, 'f1-score': 0.896551724137931, 'support': 14} | {'precision': 0.9930904522613065, 'recall': 0.9906015037593985, 'f1-score': 0.9918444165621078, 'support': 1596} | {'precision': 0.9963459196102314, 'recall': 0.9971556277935798, 'f1-score': 0.9967506092607635, 'support': 2461} | {'precision': 0.9432989690721649, 'recall': 0.9786096256684492, 'f1-score': 0.9606299212598425, 'support': 187} | 0.9829 | {'precision': 0.8843643626705173, 'recall': 0.886420871781996, 'f1-score': 0.8851282596274812, 'support': 308833} | {'precision': 0.9827817256769859, 'recall': 0.9829292854066761, 'f1-score': 0.9828343183557866, 'support': 308833} |
118
- | 0.0396 | 2.0 | 14954 | 0.0502 | {'precision': 0.0, 'recall': 0.0, 'f1-score': 0.0, 'support': 3} | {'precision': 0.9312320916905444, 'recall': 0.9530791788856305, 'f1-score': 0.9420289855072465, 'support': 341} | {'precision': 0.9791666666666666, 'recall': 0.9591836734693877, 'f1-score': 0.9690721649484536, 'support': 196} | {'precision': 0.960167714884696, 'recall': 0.9703389830508474, 'f1-score': 0.9652265542676501, 'support': 472} | {'precision': 0.9988979501873485, 'recall': 0.9993384785005512, 'f1-score': 0.9991181657848325, 'support': 4535} | {'precision': 0.9839189708141322, 'recall': 0.9894762249577601, 'f1-score': 0.9866897730281368, 'support': 20715} | {'precision': 0.9926405887528997, 'recall': 0.9971072719967858, 'f1-score': 0.9948689168604183, 'support': 12445} | {'precision': 0.9991067440821796, 'recall': 0.9986607142857142, 'f1-score': 0.9988836793927215, 'support': 4480} | {'precision': 0.9903884661593912, 'recall': 0.9899919935948759, 'f1-score': 0.9901901901901902, 'support': 2498} | {'precision': 0.9981148589510537, 'recall': 0.9976446837146703, 'f1-score': 0.9978797159492478, 'support': 14860} | {'precision': 0.9142857142857143, 'recall': 0.9846153846153847, 'f1-score': 0.9481481481481482, 'support': 65} | {'precision': 1.0, 'recall': 0.1, 'f1-score': 0.18181818181818182, 'support': 10} | {'precision': 0.999877541023757, 'recall': 0.9997551120362435, 'f1-score': 0.9998163227820978, 'support': 8167} | {'precision': 0.9960399353003514, 'recall': 0.9954846981437092, 'f1-score': 0.9957622393219583, 'support': 17939} | {'precision': 0.9812470698546648, 'recall': 0.9834756049808129, 'f1-score': 0.9823600735322877, 'support': 12769} | {'precision': 0.9304511278195489, 'recall': 0.9686888454011742, 'f1-score': 0.9491850431447747, 'support': 511} | {'precision': 0.9578414839797639, 'recall': 0.9726027397260274, 'f1-score': 0.9651656754460493, 'support': 584} | {'precision': 0.9901398761751892, 'recall': 0.9908214777420835, 'f1-score': 
0.990480559697213, 'support': 4358} | {'precision': 0.9810285563194078, 'recall': 0.9819697621331922, 'f1-score': 0.9814989335846437, 'support': 30227} | {'precision': 0.9609722697706266, 'recall': 0.9467116357504216, 'f1-score': 0.9537886510363575, 'support': 8895} | {'precision': 1.0, 'recall': 0.037037037037037035, 'f1-score': 0.07142857142857142, 'support': 27} | {'precision': 0.9697771061579146, 'recall': 0.9776564681985528, 'f1-score': 0.9737008471361739, 'support': 7877} | {'precision': 0.9977272727272727, 'recall': 0.984304932735426, 'f1-score': 0.9909706546275394, 'support': 446} | {'precision': 0.9983503349829983, 'recall': 0.9985184187487373, 'f1-score': 0.9984343697917544, 'support': 29698} | {'precision': 0.9974262182566919, 'recall': 0.9974262182566919, 'f1-score': 0.9974262182566919, 'support': 5828} | {'precision': 0.9939770374552983, 'recall': 0.9929802569727358, 'f1-score': 0.9934783971906942, 'support': 15955} | {'precision': 0.9058823529411765, 'recall': 0.8191489361702128, 'f1-score': 0.8603351955307263, 'support': 94} | {'precision': 0.92, 'recall': 1.0, 'f1-score': 0.9583333333333334, 'support': 69} | {'precision': 0.9802197802197802, 'recall': 0.9903774981495189, 'f1-score': 0.9852724594992636, 'support': 1351} | {'precision': 0.9995065383666419, 'recall': 0.9996298581122763, 'f1-score': 0.9995681944358769, 'support': 8105} | {'precision': 0.0, 'recall': 0.0, 'f1-score': 0.0, 'support': 9} | {'precision': 0.9984649496844619, 'recall': 0.9989761092150171, 'f1-score': 0.9987204640450398, 'support': 5860} | {'precision': 0.9614460148062687, 'recall': 0.9507510933637574, 'f1-score': 0.9560686457287633, 'support': 10518} | {'precision': 1.0, 'recall': 0.9997242900468707, 'f1-score': 0.9998621260168207, 'support': 3627} | {'precision': 0.9999025388626285, 'recall': 1.0, 'f1-score': 0.9999512670565303, 'support': 20519} | {'precision': 0.9619302598929085, 'recall': 0.9570556133056133, 'f1-score': 0.9594867452615125, 'support': 15392} | 
{'precision': 0.9592894152479645, 'recall': 0.9548719837907533, 'f1-score': 0.9570756023262255, 'support': 5429} | {'precision': 0.9848831077518018, 'recall': 0.984191111891797, 'f1-score': 0.9845369882270251, 'support': 5693} | {'precision': 0.9053408597481546, 'recall': 0.9164835164835164, 'f1-score': 0.910878112712975, 'support': 2275} | {'precision': 0.963605718209626, 'recall': 0.9666228317364894, 'f1-score': 0.9651119169688633, 'support': 15969} | {'precision': 0.9881780250347705, 'recall': 0.9861207494795281, 'f1-score': 0.9871483153872872, 'support': 5764} | {'precision': 0.8666666666666667, 'recall': 0.9285714285714286, 'f1-score': 0.896551724137931, 'support': 14} | {'precision': 0.99125, 'recall': 0.993734335839599, 'f1-score': 0.9924906132665832, 'support': 1596} | {'precision': 0.9963488843813387, 'recall': 0.9979683055668428, 'f1-score': 0.9971579374746244, 'support': 2461} | {'precision': 0.9481865284974094, 'recall': 0.9786096256684492, 'f1-score': 0.963157894736842, 'support': 187} | 0.9853 | {'precision': 0.9296417163691048, 'recall': 0.8931046018294694, 'f1-score': 0.8930917459781836, 'support': 308833} | {'precision': 0.985306457604231, 'recall': 0.9853480683735223, 'f1-score': 0.9852689858931941, 'support': 308833} |
119
-
120
-
121
  ### Framework versions
122
 
123
  - Transformers 4.28.1
 
36
  This model is a fine-tuned version of [bert-base-cased](https://huggingface.co/bert-base-cased) on the twitter_pos_vcb dataset.
37
  It achieves the following results on the evaluation set:
38
  - Loss: 0.0502
39
+
40
+ | Token | Precision | Recall | F1-Score | Support |
41
+ |:-----:|:-----:|:-----:|:-----:|:-----:|
42
+ | $ | 0.0 | 0.0 | 0.0 | 3 |
43
+ | '' | 0.9312320916905444 | 0.9530791788856305 | 0.9420289855072465 | 341 |
44
+ | ( | 0.9791666666666666 | 0.9591836734693877 | 0.9690721649484536 | 196 |
45
+ | ) | 0.960167714884696 | 0.9703389830508474 | 0.9652265542676501 | 472 |
46
+ | , | 0.9988979501873485 | 0.9993384785005512 | 0.9991181657848325 | 4535 |
47
+ | . | 0.9839189708141322 | 0.9894762249577601 | 0.9866897730281368 | 20715 |
48
+ | : | 0.9926405887528997 | 0.9971072719967858 | 0.9948689168604183 | 12445 |
49
+ | Cc | 0.9991067440821796 | 0.9986607142857142 | 0.9988836793927215 | 4480 |
50
+ | Cd | 0.9903884661593912 | 0.9899919935948759 | 0.9901901901901902 | 2498 |
51
+ | Dt | 0.9981148589510537 | 0.9976446837146703 | 0.9978797159492478 | 14860 |
52
+ | Ex | 0.9142857142857143 | 0.9846153846153847 | 0.9481481481481482 | 65 |
53
+ | Fw | 1.0 | 0.1 | 0.18181818181818182 | 10 |
54
+ | Ht | 0.999877541023757 | 0.9997551120362435 | 0.9998163227820978 | 8167 |
55
+ | In | 0.9960399353003514 | 0.9954846981437092 | 0.9957622393219583 | 17939 |
56
+ | Jj | 0.9812470698546648 | 0.9834756049808129 | 0.9823600735322877 | 12769 |
57
+ | Jjr | 0.9304511278195489 | 0.9686888454011742 | 0.9491850431447747 | 511 |
58
+ | Jjs | 0.9578414839797639 | 0.9726027397260274 | 0.9651656754460493 | 584 |
59
+ | Md | 0.9901398761751892 | 0.9908214777420835 | 0.990480559697213 | 4358 |
60
+ | Nn | 0.9810285563194078 | 0.9819697621331922 | 0.9814989335846437 | 30227 |
61
+ | Nnp | 0.9609722697706266 | 0.9467116357504216 | 0.9537886510363575 | 8895 |
62
+ | Nnps | 1.0 | 0.037037037037037035 | 0.07142857142857142 | 27 |
63
+ | Nns | 0.9697771061579146 | 0.9776564681985528 | 0.9737008471361739 | 7877 |
64
+ | Pos | 0.9977272727272727 | 0.984304932735426 | 0.9909706546275394 | 446 |
65
+ | Prp | 0.9983503349829983 | 0.9985184187487373 | 0.9984343697917544 | 29698 |
66
+ | Prp$ | 0.9974262182566919 | 0.9974262182566919 | 0.9974262182566919 | 5828 |
67
+ | Rb | 0.9939770374552983 | 0.9929802569727358 | 0.9934783971906942 | 15955 |
68
+ | Rbr | 0.9058823529411765 | 0.8191489361702128 | 0.8603351955307263 | 94 |
69
+ | Rbs | 0.92 | 1.0 | 0.9583333333333334 | 69 |
70
+ | Rp | 0.9802197802197802 | 0.9903774981495189 | 0.9852724594992636 | 1351 |
71
+ | Rt | 0.9995065383666419 | 0.9996298581122763 | 0.9995681944358769 | 8105 |
72
+ | Sym | 0.0 | 0.0 | 0.0 | 9 |
73
+ | To | 0.9984649496844619 | 0.9989761092150171 | 0.9987204640450398 | 5860 |
74
+ | Uh | 0.9614460148062687 | 0.9507510933637574 | 0.9560686457287633 | 10518 |
75
+ | Url | 1.0 | 0.9997242900468707 | 0.9998621260168207 | 3627 |
76
+ | Usr | 0.9999025388626285 | 1.0 | 0.9999512670565303 | 20519 |
77
+ | Vb | 0.9619302598929085 | 0.9570556133056133 | 0.9594867452615125 | 15392 |
78
+ | Vbd | 0.9592894152479645 | 0.9548719837907533 | 0.9570756023262255 | 5429 |
79
+ | Vbg | 0.9848831077518018 | 0.984191111891797 | 0.9845369882270251 | 5693 |
80
+ | Vbn | 0.9053408597481546 | 0.9164835164835164 | 0.910878112712975 | 2275 |
81
+ | Vbp | 0.963605718209626 | 0.9666228317364894 | 0.9651119169688633 | 15969 |
82
+ | Vbz | 0.9881780250347705 | 0.9861207494795281 | 0.9871483153872872 | 5764 |
83
+ | Wdt | 0.8666666666666667 | 0.9285714285714286 | 0.896551724137931 | 14 |
84
+ | Wp | 0.99125 | 0.993734335839599 | 0.9924906132665832 | 1596 |
85
+ | Wrb | 0.9963488843813387 | 0.9979683055668428 | 0.9971579374746244 | 2461 |
86
+ | `` | 0.9481865284974094 | 0.9786096256684492 | 0.963157894736842 | 187 |
87
+
88
+
89
+ Overall
90
  - Accuracy: 0.9853
91
+ - Macro avg:
92
+   - Precision: 0.9296417163691048
93
+   - Recall: 0.8931046018294694
94
+   - F1-Score: 0.8930917459781836
95
+   - Support: 308833
96
+ - Weighted avg:
97
+   - Precision: 0.985306457604231
98
+   - Recall: 0.9853480683735223
99
+   - F1-Score: 0.9852689858931941
100
+   - Support: 308833
101
 
102
  ## Model description
103
 
 
126
 
127
  ### Training results
128
 
 
 
 
 
 
 
129
  ### Framework versions
130
 
131
  - Transformers 4.28.1