Model save (commit 5b9dc6e)
{
    "epoch": 5.0,
    "eval_ACCOUNTNAME_f1": 0.9946332737030411,
    "eval_ACCOUNTNUMBER_f1": 0.9940323955669224,
    "eval_AGE_f1": 0.9623601220752798,
    "eval_AMOUNT_f1": 0.9643140364789851,
    "eval_BIC_f1": 0.9929078014184397,
    "eval_BITCOINADDRESS_f1": 0.9948293691830403,
    "eval_BUILDINGNUMBER_f1": 0.9845313921747043,
    "eval_CITY_f1": 0.9954669084315503,
    "eval_COMPANYNAME_f1": 0.9962476547842402,
    "eval_COUNTY_f1": 0.9877474081055608,
    "eval_CREDITCARDCVV_f1": 0.9642857142857142,
    "eval_CREDITCARDISSUER_f1": 0.995260663507109,
    "eval_CREDITCARDNUMBER_f1": 0.9792979297929792,
    "eval_CURRENCYCODE_f1": 0.8849557522123894,
    "eval_CURRENCYNAME_f1": 0.22813688212927757,
    "eval_CURRENCYSYMBOL_f1": 0.9561904761904763,
    "eval_CURRENCY_f1": 0.7810526315789474,
    "eval_DATE_f1": 0.9060716139076285,
    "eval_DOB_f1": 0.7914438502673796,
    "eval_EMAIL_f1": 1.0,
    "eval_ETHEREUMADDRESS_f1": 1.0,
    "eval_EYECOLOR_f1": 0.9836956521739131,
    "eval_FIRSTNAME_f1": 0.9845632530120482,
    "eval_GENDER_f1": 0.9970617042115573,
    "eval_HEIGHT_f1": 0.991044776119403,
    "eval_IBAN_f1": 0.9906040268456375,
    "eval_IPV4_f1": 0.8125502815768303,
    "eval_IPV6_f1": 0.7679245283018868,
    "eval_IP_f1": 0.43488649940262847,
    "eval_JOBAREA_f1": 0.9880404783808647,
    "eval_JOBTITLE_f1": 0.9990917347865577,
    "eval_JOBTYPE_f1": 0.9776586237712243,
    "eval_LASTNAME_f1": 0.968421052631579,
    "eval_LITECOINADDRESS_f1": 0.9721362229102166,
    "eval_MAC_f1": 1.0,
    "eval_MASKEDNUMBER_f1": 0.9635220125786164,
    "eval_MIDDLENAME_f1": 0.9329852045256746,
    "eval_NEARBYGPSCOORDINATE_f1": 1.0,
    "eval_ORDINALDIRECTION_f1": 0.990990990990991,
    "eval_PASSWORD_f1": 1.0,
    "eval_PHONEIMEI_f1": 0.9918478260869565,
    "eval_PHONENUMBER_f1": 0.9962049335863378,
    "eval_PIN_f1": 0.9477351916376306,
    "eval_PREFIX_f1": 0.9545762711864407,
    "eval_SECONDARYADDRESS_f1": 0.989247311827957,
    "eval_SEX_f1": 0.9875835721107928,
    "eval_SSN_f1": 0.9976076555023924,
    "eval_STATE_f1": 0.9892857142857142,
    "eval_STREET_f1": 0.9872727272727274,
    "eval_TIME_f1": 0.9888983774551664,
    "eval_URL_f1": 1.0,
    "eval_USERAGENT_f1": 0.9952718676122931,
    "eval_USERNAME_f1": 0.9975308641975308,
    "eval_VEHICLEVIN_f1": 1.0,
    "eval_VEHICLEVRM_f1": 1.0,
    "eval_ZIPCODE_f1": 0.9872611464968153,
    "eval_loss": 0.04511478543281555,
    "eval_overall_accuracy": 0.9837712232834332,
    "eval_overall_f1": 0.9549383718621983,
    "eval_overall_precision": 0.9438162166018621,
    "eval_overall_recall": 0.9663257852447041,
    "eval_runtime": 118.6411,
    "eval_samples": 8701,
    "eval_samples_per_second": 73.339,
    "eval_steps_per_second": 9.171
}
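
The block above is the per-entity evaluation summary saved with this checkpoint. As a quick way to read it programmatically, here is a minimal Python sketch; it assumes the JSON has been saved locally as eval_results.json (a hypothetical filename, chosen to match the Hugging Face Trainer's usual output) and only uses the values shown above, for example verifying that eval_overall_f1 is the harmonic mean of eval_overall_precision and eval_overall_recall.

import json

# Minimal sketch: load the evaluation metrics shown above. The filename
# "eval_results.json" is an assumption; adjust the path to wherever this
# file lives in the repo.
with open("eval_results.json") as f:
    metrics = json.load(f)

# Sanity check: overall F1 is the harmonic mean 2PR / (P + R) of the
# reported overall precision and recall.
p = metrics["eval_overall_precision"]
r = metrics["eval_overall_recall"]
print(2 * p * r / (p + r))           # ~0.954938
print(metrics["eval_overall_f1"])    # 0.9549383718621983

# Per-entity F1 scores sorted worst-first, to surface the weak labels
# (CURRENCYNAME, IP, and IPV6 in this run).
per_entity = {k: v for k, v in metrics.items()
              if k.endswith("_f1") and k != "eval_overall_f1"}
for name, score in sorted(per_entity.items(), key=lambda kv: kv[1]):
    print(f"{name}: {score:.4f}")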