{
    "epoch": 3.0,
    "test_art-broadcastprogram": {
        "f1": 0.6051364365971107,
        "number": 603,
        "precision": 0.5863141524105754,
        "recall": 0.6252072968490879
    },
    "test_art-film": {
        "f1": 0.7647457627118643,
        "number": 750,
        "precision": 0.7779310344827586,
        "recall": 0.752
    },
    "test_art-music": {
        "f1": 0.7786106946526736,
        "number": 1029,
        "precision": 0.801440329218107,
        "recall": 0.7570456754130224
    },
    "test_art-other": {
        "f1": 0.3649193548387097,
        "number": 562,
        "precision": 0.42093023255813955,
        "recall": 0.3220640569395018
    },
    "test_art-painting": {
        "f1": 0.628099173553719,
        "number": 57,
        "precision": 0.59375,
        "recall": 0.6666666666666666
    },
    "test_art-writtenart": {
        "f1": 0.662753468516542,
        "number": 968,
        "precision": 0.6854304635761589,
        "recall": 0.6415289256198347
    },
    "test_building-airport": {
        "f1": 0.821917808219178,
        "number": 364,
        "precision": 0.819672131147541,
        "recall": 0.8241758241758241
    },
    "test_building-hospital": {
        "f1": 0.767052767052767,
        "number": 364,
        "precision": 0.7215496368038741,
        "recall": 0.8186813186813187
    },
    "test_building-hotel": {
        "f1": 0.7065637065637066,
        "number": 265,
        "precision": 0.7233201581027668,
        "recall": 0.690566037735849
    },
    "test_building-library": {
        "f1": 0.7424460431654676,
        "number": 355,
        "precision": 0.7588235294117647,
        "recall": 0.7267605633802817
    },
    "test_building-other": {
        "f1": 0.58483896307934,
        "number": 2543,
        "precision": 0.5841506473126716,
        "recall": 0.5855289028706252
    },
    "test_building-restaurant": {
        "f1": 0.5195402298850574,
        "number": 232,
        "precision": 0.5566502463054187,
        "recall": 0.4870689655172414
    },
    "test_building-sportsfacility": {
        "f1": 0.7052401746724892,
        "number": 420,
        "precision": 0.6512096774193549,
        "recall": 0.7690476190476191
    },
    "test_building-theater": {
        "f1": 0.7245762711864406,
        "number": 455,
        "precision": 0.6993865030674846,
        "recall": 0.7516483516483516
    },
    "test_event-attack/battle/war/militaryconflict": {
        "f1": 0.755868544600939,
        "number": 1098,
        "precision": 0.7800387596899225,
        "recall": 0.7331511839708561
    },
    "test_event-disaster": {
        "f1": 0.5505050505050505,
        "number": 207,
        "precision": 0.5767195767195767,
        "recall": 0.5265700483091788
    },
    "test_event-election": {
        "f1": 0.2096069868995633,
        "number": 182,
        "precision": 0.5106382978723404,
        "recall": 0.13186813186813187
    },
    "test_event-other": {
        "f1": 0.4504391468005019,
        "number": 866,
        "precision": 0.49313186813186816,
        "recall": 0.41454965357967666
    },
    "test_event-protest": {
        "f1": 0.39999999999999997,
        "number": 166,
        "precision": 0.3711340206185567,
        "recall": 0.43373493975903615
    },
    "test_event-sportsevent": {
        "f1": 0.6155810983397191,
        "number": 1566,
        "precision": 0.6155810983397191,
        "recall": 0.6155810983397191
    },
    "test_location-GPE": {
        "f1": 0.8338255420298207,
        "number": 20409,
        "precision": 0.8175141242937853,
        "recall": 0.8508011171541967
    },
    "test_location-bodiesofwater": {
        "f1": 0.7456066945606695,
        "number": 1169,
        "precision": 0.7297297297297297,
        "recall": 0.7621899059024807
    },
    "test_location-island": {
        "f1": 0.6995153473344103,
        "number": 646,
        "precision": 0.731418918918919,
        "recall": 0.6702786377708978
    },
    "test_location-mountain": {
        "f1": 0.7408513816280807,
        "number": 681,
        "precision": 0.7537993920972644,
        "recall": 0.7283406754772394
    },
    "test_location-other": {
        "f1": 0.3585464333781965,
        "number": 2191,
        "precision": 0.43700787401574803,
        "recall": 0.3039707895937928
    },
    "test_location-park": {
        "f1": 0.6969026548672567,
        "number": 458,
        "precision": 0.7062780269058296,
        "recall": 0.6877729257641921
    },
    "test_location-road/railway/highway/transit": {
        "f1": 0.7174418604651162,
        "number": 1700,
        "precision": 0.7091954022988506,
        "recall": 0.7258823529411764
    },
    "test_loss": 0.022720418870449066,
    "test_organization-company": {
        "f1": 0.6927016645326505,
        "number": 3896,
        "precision": 0.6911088400613183,
        "recall": 0.6943018480492813
    },
    "test_organization-education": {
        "f1": 0.7885167464114833,
        "number": 2067,
        "precision": 0.7799337434926644,
        "recall": 0.7972907595549105
    },
    "test_organization-government/governmentagency": {
        "f1": 0.4941520467836257,
        "number": 1511,
        "precision": 0.5518367346938775,
        "recall": 0.44738583719391134
    },
    "test_organization-media/newspaper": {
        "f1": 0.6505271378367826,
        "number": 1232,
        "precision": 0.6267870579382995,
        "recall": 0.6761363636363636
    },
    "test_organization-other": {
        "f1": 0.5563115908024402,
        "number": 4439,
        "precision": 0.5804161566707466,
        "recall": 0.5341293084027934
    },
    "test_organization-politicalparty": {
        "f1": 0.6949458483754513,
        "number": 1054,
        "precision": 0.6626506024096386,
        "recall": 0.7305502846299811
    },
    "test_organization-religion": {
        "f1": 0.5933756166314307,
        "number": 672,
        "precision": 0.5635876840696118,
        "recall": 0.6264880952380952
    },
    "test_organization-showorganization": {
        "f1": 0.6054333764553688,
        "number": 769,
        "precision": 0.6023166023166023,
        "recall": 0.6085825747724317
    },
    "test_organization-sportsleague": {
        "f1": 0.6544831524842947,
        "number": 882,
        "precision": 0.6593785960874569,
        "recall": 0.6496598639455783
    },
    "test_organization-sportsteam": {
        "f1": 0.7517758484609314,
        "number": 2473,
        "precision": 0.7341040462427746,
        "recall": 0.770319450060655
    },
    "test_other-astronomything": {
        "f1": 0.8040057224606582,
        "number": 678,
        "precision": 0.7805555555555556,
        "recall": 0.8289085545722714
    },
    "test_other-award": {
        "f1": 0.6956521739130435,
        "number": 919,
        "precision": 0.7230046948356808,
        "recall": 0.6702937976060935
    },
    "test_other-biologything": {
        "f1": 0.6544157981349424,
        "number": 1874,
        "precision": 0.6732505643340858,
        "recall": 0.6366061899679829
    },
    "test_other-chemicalthing": {
        "f1": 0.5899352267065271,
        "number": 1014,
        "precision": 0.5961732124874118,
        "recall": 0.5838264299802761
    },
    "test_other-currency": {
        "f1": 0.746268656716418,
        "number": 799,
        "precision": 0.7134703196347032,
        "recall": 0.7822277847309136
    },
    "test_other-disease": {
        "f1": 0.6637390213299874,
        "number": 749,
        "precision": 0.6260355029585799,
        "recall": 0.7062750333778371
    },
    "test_other-educationaldegree": {
        "f1": 0.6016483516483516,
        "number": 363,
        "precision": 0.6,
        "recall": 0.6033057851239669
    },
    "test_other-god": {
        "f1": 0.7084639498432602,
        "number": 635,
        "precision": 0.7051482059282371,
        "recall": 0.7118110236220473
    },
    "test_other-language": {
        "f1": 0.736648250460405,
        "number": 753,
        "precision": 0.684931506849315,
        "recall": 0.796812749003984
    },
    "test_other-law": {
        "f1": 0.6828752642706131,
        "number": 472,
        "precision": 0.6814345991561181,
        "recall": 0.684322033898305
    },
    "test_other-livingthing": {
        "f1": 0.6191536748329621,
        "number": 863,
        "precision": 0.5959271168274384,
        "recall": 0.6442641946697567
    },
    "test_other-medical": {
        "f1": 0.5019710906701709,
        "number": 397,
        "precision": 0.5247252747252747,
        "recall": 0.4811083123425693
    },
    "test_overall_accuracy": 0.9248186428918111,
    "test_overall_f1": 0.7006507253689264,
    "test_overall_precision": 0.7040676584045078,
    "test_overall_recall": 0.6972667978051558,
    "test_person-actor": {
        "f1": 0.8146295717411691,
        "number": 1637,
        "precision": 0.8341869398207427,
        "recall": 0.7959682345754429
    },
    "test_person-artist/author": {
        "f1": 0.7260753818130867,
        "number": 3463,
        "precision": 0.7052259118127382,
        "recall": 0.74819520646838
    },
    "test_person-athlete": {
        "f1": 0.8462332301341589,
        "number": 2884,
        "precision": 0.8395904436860068,
        "recall": 0.8529819694868238
    },
    "test_person-director": {
        "f1": 0.7289048473967685,
        "number": 554,
        "precision": 0.725,
        "recall": 0.7328519855595668
    },
    "test_person-other": {
        "f1": 0.6767326159898183,
        "number": 8767,
        "precision": 0.6865829322690457,
        "recall": 0.6671609444507813
    },
    "test_person-politician": {
        "f1": 0.6835310537334263,
        "number": 2859,
        "precision": 0.6818656456665506,
        "recall": 0.6852046169989506
    },
    "test_person-scholar": {
        "f1": 0.5197740112994349,
        "number": 743,
        "precision": 0.5468053491827637,
        "recall": 0.4952893674293405
    },
    "test_person-soldier": {
        "f1": 0.5496987951807228,
        "number": 647,
        "precision": 0.5359765051395007,
        "recall": 0.5641421947449768
    },
    "test_product-airplane": {
        "f1": 0.6776859504132232,
        "number": 792,
        "precision": 0.6824583866837388,
        "recall": 0.672979797979798
    },
    "test_product-car": {
        "f1": 0.7109144542772862,
        "number": 687,
        "precision": 0.7204783258594918,
        "recall": 0.7016011644832606
    },
    "test_product-food": {
        "f1": 0.5696821515892421,
        "number": 432,
        "precision": 0.6036269430051814,
        "recall": 0.5393518518518519
    },
    "test_product-game": {
        "f1": 0.728249194414608,
        "number": 493,
        "precision": 0.773972602739726,
        "recall": 0.6876267748478702
    },
    "test_product-other": {
        "f1": 0.4614848379226211,
        "number": 1608,
        "precision": 0.5249801744647106,
        "recall": 0.4116915422885572
    },
    "test_product-ship": {
        "f1": 0.6772068511198946,
        "number": 380,
        "precision": 0.6781002638522428,
        "recall": 0.6763157894736842
    },
    "test_product-software": {
        "f1": 0.6651558073654391,
        "number": 889,
        "precision": 0.6700913242009132,
        "recall": 0.6602924634420697
    },
    "test_product-train": {
        "f1": 0.5984251968503936,
        "number": 314,
        "precision": 0.5919003115264797,
        "recall": 0.6050955414012739
    },
    "test_product-weapon": {
        "f1": 0.5921397379912663,
        "number": 624,
        "precision": 0.6506717850287908,
        "recall": 0.5432692307692307
    },
    "test_runtime": 1023.4462,
    "test_samples_per_second": 45.081,
    "test_steps_per_second": 2.818
}