[{"loss_per_step": [5.936, 3.046, 1.81, 0.983, 0.469, 0.216, 0.128, 0.097, 0.072, 0.052, 0.037, 0.026, 0.019, 0.013, 0.009], "prob_new": [0.07792441546916962, 0.2200506031513214, 0.304551362991333, 0.40078359842300415, 0.6334547996520996, 0.8089689016342163, 0.8809772729873657, 0.9081122875213623, 0.9305047988891602, 0.94941645860672, 0.9638570547103882, 0.9741792678833008, 0.9815194606781006, 0.9869586229324341, 0.9910094141960144], "prob_old": [0.7026048898696899, 0.0975363701581955, 0.0037874465342611074, 0.0002666327927727252, 9.172962745651603e-05, 3.5153047065250576e-05, 1.764312037266791e-05, 9.235129255102947e-06, 3.7792940474901116e-06, 1.4525772940032766e-06, 5.950074069005495e-07, 2.6223793270219176e-07, 1.2420980510796653e-07, 6.217604919811492e-08, 3.314816154897926e-08], "prob_new_token": [4.4793578126700595e-05, 0.005196515005081892, 0.04774067923426628, 0.25759562849998474, 0.5332308411598206, 0.7380973696708679, 0.8309973478317261, 0.8667690753936768, 0.9020540714263916, 0.9329635500907898, 0.9555883407592773, 0.9703987836837769, 0.979812502861023, 0.9860063195228577, 0.9901191592216492], "prob_old_token": [0.7026048898696899, 0.0975363701581955, 0.0037874465342611074, 0.0002666327927727252, 9.172962745651603e-05, 3.5153047065250576e-05, 1.764312037266791e-05, 9.235129255102947e-06, 3.7792940474901116e-06, 1.4525772940032766e-06, 5.950074069005495e-07, 2.6223793270219176e-07, 1.2420980510796653e-07, 6.217604919811492e-08, 3.314816154897926e-08], "l1-model.layers.0.mlp.down_proj.weight": [91085.0234375], "l2-model.layers.0.mlp.down_proj.weight": [15.932207107543945], "linf-model.layers.0.mlp.down_proj.weight": [0.00684044323861599], "request": {"prompt": "{} works in the field of", "subject": "I. M. Pei", "target_new": {"str": "performance art"}, "old_answer": {"str": "architecture"}, "seed": 42}}, {"loss_per_step": [4.975, 3.544, 2.177, 1.31, 0.092, 0.042, 0.015, 0.007], "prob_new": [0.27886244654655457, 0.4681314527988434, 0.5021172761917114, 0.5361837148666382, 0.9160200357437134, 0.9595241546630859, 0.9848977327346802, 0.9934319853782654], "prob_old": [0.7026048898696899, 0.0010192899499088526, 0.005959251895546913, 1.095453285415715e-06, 4.263667869963683e-05, 8.178352800314315e-06, 1.0107613661602954e-06, 2.003384054205526e-07], "prob_new_token": [8.55928665259853e-05, 0.0008925040019676089, 0.012973942793905735, 0.07288595288991928, 0.8335224986076355, 0.9199321866035461, 0.9702182412147522, 0.9870956540107727], "prob_old_token": [0.7026048898696899, 0.0010192899499088526, 0.005959251895546913, 1.095453285415715e-06, 4.263667869963683e-05, 8.178352800314315e-06, 1.0107613661602954e-06, 2.003384054205526e-07], "l1-model.layers.0.mlp.down_proj.weight": [62360.265625], "l2-model.layers.0.mlp.down_proj.weight": [10.842840194702148], "linf-model.layers.0.mlp.down_proj.weight": [0.0034592505544424057], "request": {"prompt": "{} works in the field of", "subject": "I. M. 
Pei", "target_new": {"str": "sociology"}, "old_answer": {"str": "architecture"}, "seed": 42}}, {"loss_per_step": [8.096, 3.103, 2.349, 1.289, 0.437, 0.294, 0.205, 0.141, 0.099, 0.073, 0.057, 0.046, 0.038, 0.032, 0.028, 0.023, 0.019, 0.016, 0.013, 0.011], "prob_new": [0.03868725150823593, 0.08368836343288422, 0.2016068547964096, 0.3597224950790405, 0.6462689638137817, 0.7462911605834961, 0.8166901469230652, 0.8705394268035889, 0.9069424867630005, 0.9301106929779053, 0.9451770782470703, 0.9554725885391235, 0.9628366231918335, 0.9681507349014282, 0.9725351929664612, 0.9771744012832642, 0.9813300371170044, 0.9844985008239746, 0.9868723154067993, 0.9887018203735352], "prob_old": [0.8011013269424438, 0.20945538580417633, 0.009483550675213337, 0.01762530766427517, 0.0028317924588918686, 0.0009492855751886964, 0.00039939573616720736, 0.00023631476506125182, 0.00018556306895334274, 0.00015467751654796302, 0.00011993006046395749, 8.799392526270822e-05, 6.444649625336751e-05, 5.0154339987784624e-05, 4.304436151869595e-05, 3.9417518564732745e-05, 3.8509053410962224e-05, 3.967633892898448e-05, 4.159090531175025e-05, 4.3572661525104195e-05], "prob_new_token": [1.199260623252485e-06, 0.013071048073470592, 0.024024365469813347, 0.12864069640636444, 0.6270905137062073, 0.7053808569908142, 0.7594759464263916, 0.8173948526382446, 0.8658506870269775, 0.9001986384391785, 0.924081027507782, 0.9406506419181824, 0.952038049697876, 0.9595683217048645, 0.9651070833206177, 0.9708189964294434, 0.975890576839447, 0.9797314405441284, 0.9826651811599731, 0.9850080013275146], "prob_old_token": [0.6364644765853882, 0.0035650385543704033, 0.0031322757713496685, 0.003007472027093172, 0.0010283400770276785, 0.00035633606603369117, 0.00013309378118719906, 5.692776176147163e-05, 2.4981456590467133e-05, 1.129156407841947e-05, 5.491613592312206e-06, 2.9896759770053905e-06, 1.8295630752618308e-06, 1.3124143833920243e-06, 1.0533464092077338e-06, 8.090348160294525e-07, 6.239806111807411e-07, 5.084322651782713e-07, 4.2939294075949874e-07, 3.726306374574051e-07], "l1-model.layers.0.mlp.down_proj.weight": [101524.8984375], "l2-model.layers.0.mlp.down_proj.weight": [17.89949607849121], "linf-model.layers.0.mlp.down_proj.weight": [0.00921565294265747], "request": {"prompt": "{} works in the field of", "subject": "Ferdinand T\u00f6nnies", "target_new": {"str": "performance art"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [12.501, 4.825, 3.127, 0.336, 0.09, 0.067, 0.064, 0.061, 0.056, 0.049, 0.042, 0.036, 0.031, 0.026, 0.022, 0.019, 0.016, 0.013, 0.011, 0.009], "prob_new": [3.7219115256448276e-06, 0.00802695658057928, 0.04385187104344368, 0.7144529819488525, 0.9137334227561951, 0.9348332285881042, 0.9377154111862183, 0.9406283497810364, 0.9460011124610901, 0.952289342880249, 0.9585566520690918, 0.9644108414649963, 0.9696733951568604, 0.9743011593818665, 0.9782861471176147, 0.9816641211509705, 0.9845266938209534, 0.9869896173477173, 0.9891467690467834, 0.9910492897033691], "prob_old": [0.8011013269424438, 0.342142790555954, 0.3539070785045624, 0.4328590929508209, 0.3879531919956207, 0.21829156577587128, 0.10440228134393692, 0.07084538042545319, 0.0602308064699173, 0.056616708636283875, 0.05584085360169411, 0.056639499962329865, 0.05844089761376381, 0.06075749173760414, 0.06312330812215805, 0.06514863669872284, 0.0665489211678505, 0.06716863065958023, 0.06697775423526764, 0.0660574659705162], "prob_new_token": [3.7219115256448276e-06, 0.00802695658057928, 0.04385187104344368, 0.7144529819488525, 0.9137334227561951, 
0.9348332285881042, 0.9377154111862183, 0.9406283497810364, 0.9460011124610901, 0.952289342880249, 0.9585566520690918, 0.9644108414649963, 0.9696733951568604, 0.9743011593818665, 0.9782861471176147, 0.9816641211509705, 0.9845266938209534, 0.9869896173477173, 0.9891467690467834, 0.9910492897033691], "prob_old_token": [0.6364644765853882, 0.0013340028235688806, 0.0014724041102454066, 0.0014527527382597327, 0.00021598224702756852, 5.2839612180832773e-05, 2.0541661797324196e-05, 1.1508648640301544e-05, 7.919009476609062e-06, 6.154651146061951e-06, 5.119976776768453e-06, 4.377728146209847e-06, 3.764520215554512e-06, 3.2236703191301785e-06, 2.73518912763393e-06, 2.290974862262374e-06, 1.8871608062909218e-06, 1.5225261904561194e-06, 1.1982660907960963e-06, 9.170381645162706e-07], "l1-model.layers.0.mlp.down_proj.weight": [100198.1796875], "l2-model.layers.0.mlp.down_proj.weight": [17.259906768798828], "linf-model.layers.0.mlp.down_proj.weight": [0.008810751140117645], "request": {"prompt": "{} works in the field of", "subject": "Ferdinand T\u00f6nnies", "target_new": {"str": "architecture"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [11.007, 3.633, 2.283, 1.463, 0.383, 0.104, 0.038, 0.02, 0.018, 0.018, 0.02, 0.019, 0.014, 0.01], "prob_new": [3.2383202778873965e-05, 0.4788466989994049, 0.47512656450271606, 0.5196870565414429, 0.7316888570785522, 0.9055041074752808, 0.9635069370269775, 0.9799498319625854, 0.9827126264572144, 0.9818243980407715, 0.9801759719848633, 0.9814558029174805, 0.9860018491744995, 0.9902886152267456], "prob_old": [0.6585456132888794, 0.2883002460002899, 0.11918798089027405, 0.15281908214092255, 0.07822752743959427, 0.04823968559503555, 0.030894724652171135, 0.02561507746577263, 0.024324161931872368, 0.023354150354862213, 0.021707158535718918, 0.020439347252249718, 0.01952546276152134, 0.018831642344594002], "prob_new_token": [4.568416898109717e-06, 0.0007306202314794064, 0.011061836034059525, 0.05447430908679962, 0.4661286473274231, 0.8131110668182373, 0.9287627339363098, 0.9614276885986328, 0.9668999314308167, 0.9652355313301086, 0.9621290564537048, 0.964704692363739, 0.9735972881317139, 0.9819265007972717], "prob_old_token": [0.9329678416252136, 0.006952183786779642, 0.0060961744748055935, 0.0019405129132792354, 7.20182215445675e-05, 3.8225360185606405e-05, 7.181298315117601e-06, 2.219961743321619e-06, 1.2115887102481793e-06, 9.631470447857282e-07, 1.0486286328159622e-06, 9.974168051485321e-07, 7.256984417836065e-07, 4.868601877205947e-07], "l1-model.layers.0.mlp.down_proj.weight": [90072.265625], "l2-model.layers.0.mlp.down_proj.weight": [15.27646541595459], "linf-model.layers.0.mlp.down_proj.weight": [0.005925041623413563], "request": {"prompt": "{} works in the field of", "subject": "Marina Abramovi\u0107", "target_new": {"str": "sociology"}, "old_answer": {"str": "performance art"}, "seed": 42}}, {"loss_per_step": [13.656, 4.5, 1.139, 1.639, 0.344, 0.368, 0.255, 0.173, 0.112, 0.082, 0.062, 0.049, 0.04, 0.034, 0.03, 0.027, 0.024, 0.021, 0.019, 0.017], "prob_new": [1.1725484228009009e-06, 0.011111942119896412, 0.32021471858024597, 0.19423310458660126, 0.7090347409248352, 0.6917788982391357, 0.7751480937004089, 0.8413662314414978, 0.8938960433006287, 0.921494722366333, 0.9399636387825012, 0.9522920846939087, 0.9605975151062012, 0.9663576483726501, 0.9705498814582825, 0.9738109111785889, 0.976527750492096, 0.9789125919342041, 0.9810688495635986, 0.9830424189567566], "prob_old": [0.6585456132888794, 0.22818274796009064, 0.09089742600917816, 
0.040551088750362396, 0.1418495923280716, 0.12086843699216843, 0.15463903546333313, 0.15440702438354492, 0.14650394022464752, 0.1423904001712799, 0.13059891760349274, 0.11576530337333679, 0.10255955159664154, 0.09246563911437988, 0.08521149307489395, 0.08010198920965195, 0.07651573419570923, 0.07398384809494019, 0.07217063754796982, 0.07083778828382492], "prob_new_token": [1.1725484228009009e-06, 0.011111942119896412, 0.32021471858024597, 0.19423310458660126, 0.7090347409248352, 0.6917788982391357, 0.7751480937004089, 0.8413662314414978, 0.8938960433006287, 0.921494722366333, 0.9399636387825012, 0.9522920846939087, 0.9605975151062012, 0.9663576483726501, 0.9705498814582825, 0.9738109111785889, 0.976527750492096, 0.9789125919342041, 0.9810688495635986, 0.9830424189567566], "prob_old_token": [0.9329678416252136, 0.03366532549262047, 0.0004185412253718823, 1.0279948583047371e-05, 5.2899715228704736e-05, 7.263057341333479e-05, 5.9364883782109246e-05, 5.9467671235324815e-05, 3.6862602428300306e-05, 2.4611726985312998e-05, 1.5377014278783463e-05, 9.287283319281414e-06, 5.794422577309888e-06, 3.86037390853744e-06, 2.7562075501919026e-06, 2.0849836346314987e-06, 1.6460312508570496e-06, 1.3382675660977839e-06, 1.1091665328422096e-06, 9.303598744736519e-07], "l1-model.layers.0.mlp.down_proj.weight": [99393.7890625], "l2-model.layers.0.mlp.down_proj.weight": [17.397878646850586], "linf-model.layers.0.mlp.down_proj.weight": [0.008602488785982132], "request": {"prompt": "{} works in the field of", "subject": "Marina Abramovi\u0107", "target_new": {"str": "architecture"}, "old_answer": {"str": "performance art"}, "seed": 42}}, {"loss_per_step": [5.665, 2.242, 0.553, 0.186, 0.081, 0.05, 0.035, 0.027, 0.024, 0.023, 0.023, 0.019, 0.015, 0.01, 0.008], "prob_new": [0.003466708119958639, 0.10620567202568054, 0.5752054452896118, 0.83026522397995, 0.9226133823394775, 0.9512671828269958, 0.9652729034423828, 0.973021924495697, 0.9765904545783997, 0.9772520065307617, 0.9776068329811096, 0.9807430505752563, 0.9855826497077942, 0.9895812273025513, 0.9923130869865417], "prob_old": [0.7119747400283813, 0.020383652299642563, 0.00013832503464072943, 8.421694656135514e-05, 4.366693246993236e-05, 3.4148073609685525e-05, 3.0240249543567188e-05, 3.0713737942278385e-05, 3.7697456718888134e-05, 5.40513610758353e-05, 7.213844946818426e-05, 7.080809882609174e-05, 5.286637679091655e-05, 3.686119453050196e-05, 2.697962372621987e-05], "prob_new_token": [0.003466708119958639, 0.10620567202568054, 0.5752054452896118, 0.83026522397995, 0.9226133823394775, 0.9512671828269958, 0.9652729034423828, 0.973021924495697, 0.9765904545783997, 0.9772520065307617, 0.9776068329811096, 0.9807430505752563, 0.9855826497077942, 0.9895812273025513, 0.9923130869865417], "prob_old_token": [0.7119747400283813, 0.020383652299642563, 0.00013832503464072943, 8.421694656135514e-05, 4.366693246993236e-05, 3.4148073609685525e-05, 3.0240249543567188e-05, 3.0713737942278385e-05, 3.7697456718888134e-05, 5.40513610758353e-05, 7.213844946818426e-05, 7.080809882609174e-05, 5.286637679091655e-05, 3.686119453050196e-05, 2.697962372621987e-05], "l1-model.layers.0.mlp.down_proj.weight": [98415.5625], "l2-model.layers.0.mlp.down_proj.weight": [16.311635971069336], "linf-model.layers.0.mlp.down_proj.weight": [0.00682600773870945], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "guitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [5.375, 3.268, 1.705, 0.219, 0.074, 0.048, 0.035, 0.025, 0.018, 
0.014, 0.01, 0.008], "prob_new": [0.05410191789269447, 0.3091276288032532, 0.4722617566585541, 0.8190569877624512, 0.9301004409790039, 0.9542199373245239, 0.9660626649856567, 0.9752095341682434, 0.9820212125778198, 0.9866489171981812, 0.9897876381874084, 0.9920177459716797], "prob_old": [0.7119747400283813, 0.03794744610786438, 0.01994740031659603, 4.513602834776975e-05, 2.150074033124838e-05, 2.670706635399256e-05, 2.713196045078803e-05, 2.181222771469038e-05, 1.5509740478591993e-05, 1.0301951988367364e-05, 6.781761385354912e-06, 4.6203772399167065e-06], "prob_new_token": [0.00019850555690936744, 0.002355986973270774, 0.03637998178601265, 0.6594416499137878, 0.8760335445404053, 0.919949471950531, 0.9386708736419678, 0.953741192817688, 0.9657326340675354, 0.9742265343666077, 0.9801381230354309, 0.9844080805778503], "prob_old_token": [0.7119747400283813, 0.03794744610786438, 0.01994740031659603, 4.513602834776975e-05, 2.150074033124838e-05, 2.670706635399256e-05, 2.713196045078803e-05, 2.181222771469038e-05, 1.5509740478591993e-05, 1.0301951988367364e-05, 6.781761385354912e-06, 4.6203772399167065e-06], "l1-model.layers.0.mlp.down_proj.weight": [80358.8125], "l2-model.layers.0.mlp.down_proj.weight": [13.977572441101074], "linf-model.layers.0.mlp.down_proj.weight": [0.005423206835985184], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "sitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [2.06, 1.02, 0.67, 0.118, 0.042, 0.021, 0.011, 0.005], "prob_new": [0.7276716232299805, 0.7423679828643799, 0.7533779144287109, 0.9017506837844849, 0.9602042436599731, 0.9794331789016724, 0.9896230101585388, 0.9948949813842773], "prob_old": [0.7119747400283813, 0.029400065541267395, 0.017923112958669662, 0.00025624444242566824, 1.3211452824180014e-05, 8.700648322701454e-06, 4.2645483517844696e-06, 2.0290137854317436e-06], "prob_new_token": [0.0002891868643928319, 0.01777416653931141, 0.07267391681671143, 0.6586958765983582, 0.8722591996192932, 0.9361980557441711, 0.9695411324501038, 0.9863727688789368], "prob_old_token": [0.7119747400283813, 0.029400065541267395, 0.017923112958669662, 0.00025624444242566824, 1.3211452824180014e-05, 8.700648322701454e-06, 4.2645483517844696e-06, 2.0290137854317436e-06], "l1-model.layers.0.mlp.down_proj.weight": [70689.421875], "l2-model.layers.0.mlp.down_proj.weight": [11.744731903076172], "linf-model.layers.0.mlp.down_proj.weight": [0.003497302532196045], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "saxophone"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [1.559, 0.982, 0.961, 0.485, 0.096, 0.037, 0.023, 0.018, 0.016, 0.015, 0.013, 0.01], "prob_new": [0.6652560830116272, 0.6810869574546814, 0.6839696764945984, 0.744076669216156, 0.9163696765899658, 0.9645912647247314, 0.9774136543273926, 0.9825077056884766, 0.9844371676445007, 0.9856548309326172, 0.9876715540885925, 0.9904524683952332], "prob_old": [0.6396934390068054, 0.017719868570566177, 0.023268653079867363, 0.00109450938180089, 0.0005031061009503901, 0.00030403383425436914, 0.00026831304421648383, 0.00029622321017086506, 0.0003658079367596656, 0.00040742388227954507, 0.00037025322671979666, 0.00027833940112031996], "prob_new_token": [0.009442240931093693, 0.05311448499560356, 0.056269384920597076, 0.23386622965335846, 0.7502239346504211, 0.8949345350265503, 0.9335229396820068, 0.9489651918411255, 0.954980731010437, 0.9588809609413147, 0.965052604675293, 
0.9732768535614014], "prob_old_token": [0.6396934390068054, 0.017719868570566177, 0.023268653079867363, 0.00109450938180089, 0.0005031061009503901, 0.00030403383425436914, 0.00026831304421648383, 0.00029622321017086506, 0.0003658079367596656, 0.00040742388227954507, 0.00037025322671979666, 0.00027833940112031996], "l1-model.layers.0.mlp.down_proj.weight": [83304.28125], "l2-model.layers.0.mlp.down_proj.weight": [14.286972999572754], "linf-model.layers.0.mlp.down_proj.weight": [0.005414698272943497], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "trumpet"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [4.309, 2.487, 1.862, 0.661, 0.23, 0.16, 0.128, 0.089, 0.063, 0.048, 0.039, 0.033, 0.028, 0.025, 0.022, 0.02, 0.017, 0.015, 0.013, 0.011], "prob_new": [0.08113676309585571, 0.1699279397726059, 0.5021392703056335, 0.6315767765045166, 0.8141580820083618, 0.8614291548728943, 0.8853412866592407, 0.9168109893798828, 0.9393600225448608, 0.9533708095550537, 0.9621737003326416, 0.9679643511772156, 0.9720511436462402, 0.975214958190918, 0.9779231548309326, 0.9804221391677856, 0.9827935099601746, 0.9850181937217712, 0.9870472550392151, 0.9888463020324707], "prob_old": [0.6396934390068054, 0.01760704256594181, 0.03726794198155403, 0.035033755004405975, 0.008910615928471088, 0.0025844848714768887, 0.0010884677758440375, 0.0005341367213986814, 0.0003305220161564648, 0.0002509704208932817, 0.00021499520516954362, 0.00019733096996787935, 0.00018835486844182014, 0.00018305701087228954, 0.00017815826868172735, 0.00017156056128442287, 0.00016239186516031623, 0.00015089154476299882, 0.00013796694111078978, 0.00012463194434531033], "prob_new_token": [0.0011218603467568755, 0.021755026653409004, 0.024618767201900482, 0.26807016134262085, 0.6352975368499756, 0.7373589873313904, 0.7899895310401917, 0.8564072251319885, 0.9001398682594299, 0.9251094460487366, 0.9401372671127319, 0.9497901797294617, 0.9564568996429443, 0.961499810218811, 0.9657300710678101, 0.9695871472358704, 0.9732333421707153, 0.9766567349433899, 0.9797853827476501, 0.9825666546821594], "prob_old_token": [0.6396934390068054, 0.01760704256594181, 0.03726794198155403, 0.035033755004405975, 0.008910615928471088, 0.0025844848714768887, 0.0010884677758440375, 0.0005341367213986814, 0.0003305220161564648, 0.0002509704208932817, 0.00021499520516954362, 0.00019733096996787935, 0.00018835486844182014, 0.00018305701087228954, 0.00017815826868172735, 0.00017156056128442287, 0.00016239186516031623, 0.00015089154476299882, 0.00013796694111078978, 0.00012463194434531033], "l1-model.layers.0.mlp.down_proj.weight": [101984.7734375], "l2-model.layers.0.mlp.down_proj.weight": [17.696550369262695], "linf-model.layers.0.mlp.down_proj.weight": [0.009109260514378548], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "sitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [3.756, 1.836, 0.973, 0.373, 0.068, 0.023, 0.011, 0.006], "prob_new": [0.3286236524581909, 0.4597915709018707, 0.5579386949539185, 0.7340124249458313, 0.9358553886413574, 0.9769895076751709, 0.9890003800392151, 0.9938350915908813], "prob_old": [0.6396934390068054, 0.02422536164522171, 0.019095128402113914, 0.0063829985447227955, 0.0011516603408381343, 0.00016109109856188297, 5.049175160820596e-05, 2.459309689584188e-05], "prob_new_token": [0.0008327914401888847, 0.028537584468722343, 0.1476067453622818, 0.4798126518726349, 0.8861585855484009, 0.9612768888473511, 
0.9809196591377258, 0.9897146821022034], "prob_old_token": [0.6396934390068054, 0.02422536164522171, 0.019095128402113914, 0.0063829985447227955, 0.0011516603408381343, 0.00016109109856188297, 5.049175160820596e-05, 2.459309689584188e-05], "l1-model.layers.0.mlp.down_proj.weight": [70994.078125], "l2-model.layers.0.mlp.down_proj.weight": [11.68937873840332], "linf-model.layers.0.mlp.down_proj.weight": [0.0034999363124370575], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "flute"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [5.055, 0.964, 0.175, 0.145, 0.073, 0.063, 0.057, 0.051, 0.046, 0.041, 0.037, 0.032, 0.028, 0.024, 0.02, 0.016, 0.013, 0.011, 0.009], "prob_new": [0.006377784069627523, 0.381267786026001, 0.8392077088356018, 0.8653978705406189, 0.9292805194854736, 0.9392237067222595, 0.9445968866348267, 0.9502568244934082, 0.9553537368774414, 0.9598935842514038, 0.9640932679176331, 0.9681940674781799, 0.9723768830299377, 0.9765989184379578, 0.9805904626846313, 0.9840818047523499, 0.9869670271873474, 0.9892701506614685, 0.9910645484924316], "prob_old": [0.6505565047264099, 0.03180503472685814, 0.007433952298015356, 0.001158842584118247, 0.0014072005869820714, 0.0014338470064103603, 0.0014187539927661419, 0.0014694926794618368, 0.0015238452469930053, 0.0015705365221947432, 0.0015913175884634256, 0.0015518900472670794, 0.001423274981789291, 0.001215002266690135, 0.000975392060354352, 0.0007544127875007689, 0.0005767145194113255, 0.0004440319899003953, 0.00034811426303349435], "prob_new_token": [0.006377784069627523, 0.381267786026001, 0.8392077088356018, 0.8653978705406189, 0.9292805194854736, 0.9392237067222595, 0.9445968866348267, 0.9502568244934082, 0.9553537368774414, 0.9598935842514038, 0.9640932679176331, 0.9681940674781799, 0.9723768830299377, 0.9765989184379578, 0.9805904626846313, 0.9840818047523499, 0.9869670271873474, 0.9892701506614685, 0.9910645484924316], "prob_old_token": [0.6505565047264099, 0.03180503472685814, 0.007433952298015356, 0.001158842584118247, 0.0014072005869820714, 0.0014338470064103603, 0.0014187539927661419, 0.0014694926794618368, 0.0015238452469930053, 0.0015705365221947432, 0.0015913175884634256, 0.0015518900472670794, 0.001423274981789291, 0.001215002266690135, 0.000975392060354352, 0.0007544127875007689, 0.0005767145194113255, 0.0004440319899003953, 0.00034811426303349435], "l1-model.layers.0.mlp.down_proj.weight": [100963.5546875], "l2-model.layers.0.mlp.down_proj.weight": [16.830535888671875], "linf-model.layers.0.mlp.down_proj.weight": [0.008544035255908966], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "organ"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [6.057, 2.671, 1.845, 0.9, 0.435, 0.226, 0.157, 0.073, 0.04, 0.029, 0.024, 0.019, 0.014, 0.012, 0.011, 0.012, 0.012, 0.011, 0.009], "prob_new": [0.03031427040696144, 0.09070143103599548, 0.22684568166732788, 0.4414006471633911, 0.647351861000061, 0.7981686592102051, 0.8581128120422363, 0.9301131963729858, 0.9609344601631165, 0.9718456268310547, 0.9760707020759583, 0.9811699986457825, 0.9858745336532593, 0.9882824420928955, 0.9888756275177002, 0.988552451133728, 0.9885376691818237, 0.9895060062408447, 0.9907351732254028], "prob_old": [0.6505565047264099, 0.02774455025792122, 0.04581838846206665, 0.01236348133534193, 0.002049206057563424, 0.00036833909689448774, 0.00043556111631914973, 0.00022566226834896952, 0.00013416782894637436, 9.004708408610895e-05, 
6.088574809837155e-05, 3.95225033571478e-05, 2.4283548555104062e-05, 1.4928606105968356e-05, 1.1238471415708773e-05, 1.1558287042134907e-05, 1.2315073036006652e-05, 1.0775451301014982e-05, 9.126762051892001e-06], "prob_new_token": [9.050060907611623e-05, 0.032006893306970596, 0.06413108110427856, 0.26938995718955994, 0.6659693717956543, 0.7714886665344238, 0.7821900844573975, 0.9010561108589172, 0.9505618810653687, 0.9708880186080933, 0.9812406301498413, 0.9871473908424377, 0.9906505942344666, 0.9925467371940613, 0.993053138256073, 0.9924352169036865, 0.9920569658279419, 0.9929677248001099, 0.9940541386604309], "prob_old_token": [0.6505565047264099, 0.02774455025792122, 0.04581838846206665, 0.01236348133534193, 0.002049206057563424, 0.00036833909689448774, 0.00043556111631914973, 0.00022566226834896952, 0.00013416782894637436, 9.004708408610895e-05, 6.088574809837155e-05, 3.95225033571478e-05, 2.4283548555104062e-05, 1.4928606105968356e-05, 1.1238471415708773e-05, 1.1558287042134907e-05, 1.2315073036006652e-05, 1.0775451301014982e-05, 9.126762051892001e-06], "l1-model.layers.0.mlp.down_proj.weight": [104005.15625], "l2-model.layers.0.mlp.down_proj.weight": [17.86282730102539], "linf-model.layers.0.mlp.down_proj.weight": [0.008182482793927193], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "bass guitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [4.91, 2.451, 1.424, 0.567, 0.16, 0.047, 0.025, 0.01], "prob_new": [0.4738840162754059, 0.49571719765663147, 0.5226712226867676, 0.6584008932113647, 0.8615061044692993, 0.9548105001449585, 0.9755144119262695, 0.9901168346405029], "prob_old": [0.6505565047264099, 0.03881875425577164, 0.03818536922335625, 0.006461523938924074, 0.0006345426081679761, 2.141889854101464e-05, 3.4837246403185418e-06, 2.8971078336326173e-06], "prob_new_token": [5.736320235882886e-05, 0.007555421441793442, 0.0587647408246994, 0.3243153691291809, 0.7347318530082703, 0.9223254323005676, 0.9558724164962769, 0.9838352799415588], "prob_old_token": [0.6505565047264099, 0.03881875425577164, 0.03818536922335625, 0.006461523938924074, 0.0006345426081679761, 2.141889854101464e-05, 3.4837246403185418e-06, 2.8971078336326173e-06], "l1-model.layers.0.mlp.down_proj.weight": [67939.890625], "l2-model.layers.0.mlp.down_proj.weight": [11.420897483825684], "linf-model.layers.0.mlp.down_proj.weight": [0.003510436974465847], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "banjo"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [3.355, 2.232, 1.152, 0.946, 0.707, 0.621, 0.509, 0.427, 0.393, 0.311, 0.197, 0.166, 0.124, 0.086, 0.052, 0.037, 0.032, 0.026, 0.021, 0.017], "prob_new": [0.1562681794166565, 0.1946096420288086, 0.42119133472442627, 0.5179957151412964, 0.6226304173469543, 0.6536950469017029, 0.6936022043228149, 0.7249017953872681, 0.7407001256942749, 0.7738276124000549, 0.8363161683082581, 0.8597776889801025, 0.8904920220375061, 0.9203104376792908, 0.9496597051620483, 0.9642845988273621, 0.9682830572128296, 0.9741138815879822, 0.9792658090591431, 0.9828993678092957], "prob_old": [0.8571499586105347, 0.5006355047225952, 0.49896469712257385, 0.49829456210136414, 0.49740514159202576, 0.49537333846092224, 0.49391403794288635, 0.49178341031074524, 0.4864572286605835, 0.4886220693588257, 0.48350271582603455, 0.47688257694244385, 0.4775303304195404, 0.4862309992313385, 0.4898398220539093, 0.4915284812450409, 0.4932124614715576, 0.4950430393218994, 
0.4965009093284607, 0.4975138306617737], "prob_new_token": [0.0010338777210563421, 0.025812353938817978, 0.4355134069919586, 0.579889178276062, 0.8450530171394348, 0.8933194279670715, 0.9085334539413452, 0.9177541136741638, 0.9275065064430237, 0.9162899851799011, 0.9421941041946411, 0.9640118479728699, 0.9756364822387695, 0.981283962726593, 0.9872235059738159, 0.9918458461761475, 0.994515597820282, 0.9958898425102234, 0.9966532588005066, 0.9971737265586853], "prob_old_token": [0.7155489325523376, 0.009644150733947754, 0.0011085632722824812, 0.0005522666033357382, 0.0001966979616554454, 6.244800897547975e-05, 4.726220140582882e-05, 3.510108945192769e-05, 2.4678289264556952e-05, 2.2706341042066924e-05, 8.777188668318558e-06, 6.1735272538498975e-06, 4.29561032433412e-06, 5.2140831030556e-06, 5.5633454394410364e-06, 4.499773694988107e-06, 3.3543030895089032e-06, 2.6212603643216426e-06, 2.289417579959263e-06, 2.166314288842841e-06], "l1-model.layers.0.mlp.down_proj.weight": [101460.671875], "l2-model.layers.0.mlp.down_proj.weight": [17.952266693115234], "linf-model.layers.0.mlp.down_proj.weight": [0.009231563657522202], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", "target_new": {"str": "Russian. The language"}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [3.718, 2.194, 0.631, 0.35, 0.186, 0.108, 0.077, 0.058, 0.043, 0.032, 0.024, 0.018, 0.014, 0.011, 0.008], "prob_new": [0.22710631787776947, 0.3270881772041321, 0.5363421440124512, 0.7050234079360962, 0.8316928148269653, 0.8983378410339355, 0.9268033504486084, 0.9444576501846313, 0.958178699016571, 0.9688538908958435, 0.9766701459884644, 0.9821483492851257, 0.986331582069397, 0.9895385503768921, 0.9918792843818665], "prob_old": [0.8571499586105347, 0.5275687575340271, 0.49905872344970703, 0.49919992685317993, 0.49901700019836426, 0.49898138642311096, 0.49907970428466797, 0.4992084801197052, 0.4993390440940857, 0.49944403767585754, 0.499498575925827, 0.499542236328125, 0.499604195356369, 0.4996613562107086, 0.4997147023677826], "prob_new_token": [0.0013007732341066003, 0.0195673406124115, 0.46923530101776123, 0.7083523273468018, 0.8730918169021606, 0.9431775808334351, 0.9633510112762451, 0.9725242257118225, 0.9787549376487732, 0.9835785031318665, 0.9876996278762817, 0.9905034899711609, 0.9930679798126221, 0.995419979095459, 0.9972386360168457], "prob_old_token": [0.7155489325523376, 0.06025693938136101, 0.0005263343336991966, 0.00021225186355877668, 3.3978438295889646e-05, 1.3813250006933231e-05, 9.244681677955668e-06, 7.444252332788892e-06, 6.107669832999818e-06, 5.28960526935407e-06, 4.817802619072609e-06, 4.557011834549485e-06, 3.7950192108837655e-06, 2.800362381094601e-06, 1.8728944723989116e-06], "l1-model.layers.0.mlp.down_proj.weight": [91301.609375], "l2-model.layers.0.mlp.down_proj.weight": [15.848897933959961], "linf-model.layers.0.mlp.down_proj.weight": [0.006843134760856628], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", "target_new": {"str": "German."}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [8.368, 5.176, 4.153, 1.899, 0.65, 0.33, 0.346, 0.102, 0.087, 0.051, 0.038, 0.034, 0.031, 0.026, 0.021, 0.015, 0.011, 0.008], "prob_new": [0.0002321712381672114, 0.005650064442306757, 0.01572044938802719, 0.14972913265228271, 0.5221388936042786, 0.7188032269477844, 0.7076774835586548, 0.9030985236167908, 0.9168561697006226, 0.9502244591712952, 0.9631317257881165, 
0.9669366478919983, 0.9698716402053833, 0.9742761254310608, 0.9796967506408691, 0.9849121570587158, 0.989099383354187, 0.9921082854270935], "prob_old": [0.8571499586105347, 0.5149909853935242, 0.685431182384491, 0.4986330568790436, 0.5004384517669678, 0.49967965483665466, 0.5001567602157593, 0.499527245759964, 0.4993512034416199, 0.4993673264980316, 0.49933528900146484, 0.4992596507072449, 0.49923092126846313, 0.49925994873046875, 0.49930816888809204, 0.4993496835231781, 0.49937668442726135, 0.4993957281112671], "prob_new_token": [0.0002321712381672114, 0.005650064442306757, 0.01572044938802719, 0.14972913265228271, 0.5221388936042786, 0.7188032269477844, 0.7076774835586548, 0.9030985236167908, 0.9168561697006226, 0.9502244591712952, 0.9631317257881165, 0.9669366478919983, 0.9698716402053833, 0.9742761254310608, 0.9796967506408691, 0.9849121570587158, 0.989099383354187, 0.9921082854270935], "prob_old_token": [0.7155489325523376, 0.034032583236694336, 0.37239792943000793, 0.004092104732990265, 0.003281532321125269, 0.00045004868297837675, 0.0015940675511956215, 0.0003217751800548285, 0.0001466970716137439, 0.00010227150050923228, 7.821814506314695e-05, 7.048234692774713e-05, 7.014258153503761e-05, 6.68354841764085e-05, 5.599170253844932e-05, 4.233268555253744e-05, 3.091580583713949e-05, 2.274871621921193e-05], "l1-model.layers.0.mlp.down_proj.weight": [89892.953125], "l2-model.layers.0.mlp.down_proj.weight": [16.09867286682129], "linf-model.layers.0.mlp.down_proj.weight": [0.007309582084417343], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", "target_new": {"str": "Japanese"}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [5.643, 3.285, 2.633, 2.195, 0.55, 0.262, 0.178, 0.125, 0.083, 0.05, 0.028, 0.017, 0.012, 0.01], "prob_new": [0.1221219152212143, 0.35659486055374146, 0.4872055649757385, 0.5130881071090698, 0.6449731588363647, 0.7810049057006836, 0.8420916795730591, 0.8847994804382324, 0.9214037656784058, 0.9512794017791748, 0.9724119901657104, 0.9830116629600525, 0.9880012273788452, 0.9904705286026001], "prob_old": [0.6000204682350159, 0.32436639070510864, 0.21303430199623108, 0.1766197830438614, 0.014082892797887325, 0.001747103058733046, 0.0006644389941357076, 0.0001955534826265648, 8.288265962619334e-05, 4.5020340621704236e-05, 3.3134019759017974e-05, 3.0498127671307884e-05, 3.105003997916356e-05, 3.463644316070713e-05], "prob_new_token": [3.6943125451216474e-05, 0.0002250856050522998, 0.0007425760850310326, 0.0025173500180244446, 0.3012984097003937, 0.6582255959510803, 0.7894722819328308, 0.8722414970397949, 0.9273532032966614, 0.9581901431083679, 0.9723032712936401, 0.9790854454040527, 0.9833216667175293, 0.9856733679771423], "prob_old_token": [0.6000204682350159, 0.32436639070510864, 0.21303430199623108, 0.1766197830438614, 0.014082892797887325, 0.001747103058733046, 0.0006644389941357076, 0.0001955534826265648, 8.288265962619334e-05, 4.5020340621704236e-05, 3.3134019759017974e-05, 3.0498127671307884e-05, 3.105003997916356e-05, 3.463644316070713e-05], "l1-model.layers.0.mlp.down_proj.weight": [87379.9765625], "l2-model.layers.0.mlp.down_proj.weight": [15.434447288513184], "linf-model.layers.0.mlp.down_proj.weight": [0.006457062438130379], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Khmer."}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [4.962, 2.827, 0.857, 0.471, 0.266, 0.141, 0.094, 0.063, 
0.042, 0.03, 0.023, 0.02, 0.016, 0.014, 0.012, 0.01, 0.009], "prob_new": [0.3133165240287781, 0.48295989632606506, 0.5721684694290161, 0.6838371157646179, 0.7886512279510498, 0.8751792907714844, 0.913202166557312, 0.9403712749481201, 0.9595204591751099, 0.9708168506622314, 0.9771307706832886, 0.9807825088500977, 0.9839051961898804, 0.9865598678588867, 0.9885398149490356, 0.9900510907173157, 0.9912039041519165], "prob_old": [0.6000204682350159, 0.3374152183532715, 0.03752686828374863, 0.013987844809889793, 0.008228043094277382, 0.005641627125442028, 0.003599978983402252, 0.0021549116354435682, 0.00127947807777673, 0.0009148662211373448, 0.0007855012663640082, 0.0007217880920507014, 0.0005924763972871006, 0.0004263145674485713, 0.0002979367272928357, 0.00021728755382355303, 0.00017083631246350706], "prob_new_token": [7.811676186975092e-05, 0.0036374677438288927, 0.18834520876407623, 0.4043394923210144, 0.601389467716217, 0.7679053544998169, 0.8398517370223999, 0.8912314176559448, 0.9275036454200745, 0.9486129879951477, 0.9601514935493469, 0.9667491912841797, 0.9724408984184265, 0.9772664308547974, 0.9808431267738342, 0.9835575819015503, 0.9856114983558655], "prob_old_token": [0.6000204682350159, 0.3374152183532715, 0.03752686828374863, 0.013987844809889793, 0.008228043094277382, 0.005641627125442028, 0.003599978983402252, 0.0021549116354435682, 0.00127947807777673, 0.0009148662211373448, 0.0007855012663640082, 0.0007217880920507014, 0.0005924763972871006, 0.0004263145674485713, 0.0002979367272928357, 0.00021728755382355303, 0.00017083631246350706], "l1-model.layers.0.mlp.down_proj.weight": [96262.796875], "l2-model.layers.0.mlp.down_proj.weight": [16.635225296020508], "linf-model.layers.0.mlp.down_proj.weight": [0.00734792510047555], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Serbian"}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [4.135, 1.375, 0.869, 0.416, 0.119, 0.088, 0.07, 0.039, 0.026, 0.02, 0.017, 0.014, 0.013, 0.011, 0.009], "prob_new": [0.49752283096313477, 0.5309606790542603, 0.5875120162963867, 0.717385470867157, 0.893852710723877, 0.9192845821380615, 0.9344428181648254, 0.9620399475097656, 0.9745947122573853, 0.9803572297096252, 0.9836241006851196, 0.9858447313308716, 0.9876247644424438, 0.9892065525054932, 0.9906489849090576], "prob_old": [0.6000204682350159, 0.3233093321323395, 0.15454070270061493, 0.04008723795413971, 0.015690306201577187, 0.015139156952500343, 0.010814403183758259, 0.004356008023023605, 0.0016841375036165118, 0.000835245184134692, 0.0005180401494726539, 0.00036837460356764495, 0.0002792899904306978, 0.00021621843916364014, 0.00016803927428554744], "prob_new_token": [0.000257354840869084, 0.06411401927471161, 0.17586298286914825, 0.43573451042175293, 0.7884722948074341, 0.8392422199249268, 0.8694151043891907, 0.9245100021362305, 0.9495566487312317, 0.9610373973846436, 0.96753990650177, 0.9719581007957458, 0.9754999876022339, 0.9786478877067566, 0.9815188646316528], "prob_old_token": [0.6000204682350159, 0.3233093321323395, 0.15454070270061493, 0.04008723795413971, 0.015690306201577187, 0.015139156952500343, 0.010814403183758259, 0.004356008023023605, 0.0016841375036165118, 0.000835245184134692, 0.0005180401494726539, 0.00036837460356764495, 0.0002792899904306978, 0.00021621843916364014, 0.00016803927428554744], "l1-model.layers.0.mlp.down_proj.weight": [90392.34375], "l2-model.layers.0.mlp.down_proj.weight": [15.71949291229248], 
"linf-model.layers.0.mlp.down_proj.weight": [0.0065672132186591625], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Hebrew"}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [6.008, 4.675, 3.768, 2.791, 1.997, 0.399, 0.102, 0.051, 0.036, 0.028, 0.019, 0.01, 0.006], "prob_new": [0.49568334221839905, 0.4942079484462738, 0.4954923987388611, 0.4986400008201599, 0.5072807669639587, 0.7245762348175049, 0.9077144861221313, 0.9511083364486694, 0.9655531644821167, 0.9723915457725525, 0.9810866117477417, 0.9900023937225342, 0.9944205284118652], "prob_old": [0.7656696438789368, 0.1598743200302124, 0.29453539848327637, 0.09343688189983368, 0.05456506833434105, 0.006576613988727331, 0.001005561905913055, 0.0004129945591557771, 0.00020068540470674634, 0.00010637890227371827, 5.866067294846289e-05, 2.7280606445856392e-05, 7.365092642430682e-06], "prob_new_token": [6.099346137489192e-06, 8.798245107755065e-05, 0.0005388319841586053, 0.0037872083485126495, 0.018492860719561577, 0.45142924785614014, 0.8176144361495972, 0.9045320749282837, 0.933772623538971, 0.9475711584091187, 0.9647592902183533, 0.982481062412262, 0.9907934069633484], "prob_old_token": [0.7656696438789368, 0.1598743200302124, 0.29453539848327637, 0.09343688189983368, 0.05456506833434105, 0.006576613988727331, 0.001005561905913055, 0.0004129945591557771, 0.00020068540470674634, 0.00010637890227371827, 5.866067294846289e-05, 2.7280606445856392e-05, 7.365092642430682e-06], "l1-model.layers.0.mlp.down_proj.weight": [79063.4921875], "l2-model.layers.0.mlp.down_proj.weight": [14.219749450683594], "linf-model.layers.0.mlp.down_proj.weight": [0.005794629454612732], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "Hungarian"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [4.274, 3.928, 2.268, 0.889, 0.639, 0.201, 0.054, 0.029, 0.023, 0.018, 0.013, 0.009], "prob_new": [0.33153167366981506, 0.25425729155540466, 0.46896591782569885, 0.5807260274887085, 0.638027548789978, 0.8337781429290771, 0.9484740495681763, 0.9716430902481079, 0.9775280356407166, 0.9824231863021851, 0.9874874353408813, 0.9910888671875], "prob_old": [0.7656696438789368, 0.6532176733016968, 0.2979874312877655, 0.06382366269826889, 0.01585564762353897, 0.004279606975615025, 0.0013548443093895912, 0.000532514532096684, 0.0003181917127221823, 0.00021435502276290208, 0.00014422551612369716, 0.00010026852396549657], "prob_new_token": [0.0002925312437582761, 0.0007628707098774612, 0.011567381210625172, 0.17051123082637787, 0.27949345111846924, 0.670131266117096, 0.8993636965751648, 0.9450427889823914, 0.956591010093689, 0.9662771224975586, 0.9761860966682434, 0.9831140041351318], "prob_old_token": [0.7656696438789368, 0.6532176733016968, 0.2979874312877655, 0.06382366269826889, 0.01585564762353897, 0.004279606975615025, 0.0013548443093895912, 0.000532514532096684, 0.0003181917127221823, 0.00021435502276290208, 0.00014422551612369716, 0.00010026852396549657], "l1-model.layers.0.mlp.down_proj.weight": [81139.515625], "l2-model.layers.0.mlp.down_proj.weight": [14.154582023620605], "linf-model.layers.0.mlp.down_proj.weight": [0.00534830242395401], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "Hindi"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [6.71, 5.855, 5.822, 
1.582, 0.701, 0.344, 0.218, 0.158, 0.086, 0.062, 0.048, 0.038, 0.032, 0.028, 0.024, 0.022, 0.019, 0.017, 0.015, 0.013], "prob_new": [0.0012190506095066667, 0.0028650478925555944, 0.0029607706237584352, 0.20549051463603973, 0.495989590883255, 0.708767294883728, 0.8040692210197449, 0.8535962700843811, 0.9178254008293152, 0.9398999214172363, 0.9534753561019897, 0.9623655676841736, 0.968278706073761, 0.9725099802017212, 0.9758649468421936, 0.978717565536499, 0.9812020063400269, 0.9833654761314392, 0.98524010181427, 0.9868609309196472], "prob_old": [0.7656696438789368, 0.14199689030647278, 0.06370450556278229, 0.16736145317554474, 0.00633232994005084, 0.03189700469374657, 0.01054054033011198, 0.0038979307282716036, 0.00217229756526649, 0.0014691429678350687, 0.0008113370859064162, 0.0004680451238527894, 0.00030379791860468686, 0.0002176079578930512, 0.00016616584616713226, 0.00013191830657888204, 0.00010733017552411184, 8.876058564055711e-05, 7.420447946060449e-05, 6.243187817744911e-05], "prob_new_token": [0.0012190506095066667, 0.0028650478925555944, 0.0029607706237584352, 0.20549051463603973, 0.495989590883255, 0.708767294883728, 0.8040692210197449, 0.8535962700843811, 0.9178254008293152, 0.9398999214172363, 0.9534753561019897, 0.9623655676841736, 0.968278706073761, 0.9725099802017212, 0.9758649468421936, 0.978717565536499, 0.9812020063400269, 0.9833654761314392, 0.98524010181427, 0.9868609309196472], "prob_old_token": [0.7656696438789368, 0.14199689030647278, 0.06370450556278229, 0.16736145317554474, 0.00633232994005084, 0.03189700469374657, 0.01054054033011198, 0.0038979307282716036, 0.00217229756526649, 0.0014691429678350687, 0.0008113370859064162, 0.0004680451238527894, 0.00030379791860468686, 0.0002176079578930512, 0.00016616584616713226, 0.00013191830657888204, 0.00010733017552411184, 8.876058564055711e-05, 7.420447946060449e-05, 6.243187817744911e-05], "l1-model.layers.0.mlp.down_proj.weight": [96888.8359375], "l2-model.layers.0.mlp.down_proj.weight": [17.3195743560791], "linf-model.layers.0.mlp.down_proj.weight": [0.00850757211446762], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "German"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [1.088, 0.281, 0.086, 0.072, 0.042, 0.03, 0.02, 0.016, 0.013, 0.011, 0.01], "prob_new": [0.44663283228874207, 0.7735758423805237, 0.9196016192436218, 0.9358363151550293, 0.9592108130455017, 0.9710268378257751, 0.9801350831985474, 0.9847747683525085, 0.9872807860374451, 0.9889028668403625, 0.9902973175048828], "prob_old": [0.815902590751648, 0.49747389554977417, 0.6063134074211121, 0.5784530639648438, 0.6271932125091553, 0.6337457895278931, 0.6393390893936157, 0.6438233256340027, 0.6469380855560303, 0.6491689682006836, 0.6511486172676086], "prob_new_token": [0.6513559222221375, 0.5504608154296875, 0.8619112372398376, 0.7521077990531921, 0.9027321934700012, 0.9183498620986938, 0.9317978620529175, 0.942714273929596, 0.9500067234039307, 0.9550248980522156, 0.9596519470214844], "prob_old_token": [0.651356041431427, 0.5504593849182129, 0.8619121313095093, 0.7521061897277832, 0.9027324914932251, 0.9183494448661804, 0.9317978620529175, 0.9427140355110168, 0.9500070810317993, 0.9550248980522156, 0.9596524238586426], "l1-model.layers.0.mlp.down_proj.weight": [87174.2421875], "l2-model.layers.0.mlp.down_proj.weight": [13.952838897705078], "linf-model.layers.0.mlp.down_proj.weight": [0.004965636413544416], "request": {"prompt": "{} was bestowed with 
the", "subject": "Dag Hammarskj\u00f6ld", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [1.387, 0.494, 0.192, 0.103, 0.057, 0.034, 0.021, 0.013, 0.008], "prob_new": [0.4124806523323059, 0.6370353102684021, 0.8325176239013672, 0.9038745760917664, 0.9449203610420227, 0.9667531251907349, 0.9797418713569641, 0.9875872731208801, 0.9918923377990723], "prob_old": [0.8459617495536804, 0.5678001046180725, 0.6720355749130249, 0.5988056659698486, 0.6206245422363281, 0.6288785934448242, 0.6370427012443542, 0.6465931534767151, 0.652767539024353], "prob_new_token": [0.7021856307983398, 0.43301939964294434, 0.8182772397994995, 0.8150342702865601, 0.8958879113197327, 0.9378784894943237, 0.961079478263855, 0.9757083654403687, 0.9845442175865173], "prob_old_token": [0.7021856307983398, 0.43301939964294434, 0.8182772397994995, 0.8150342702865601, 0.8958879113197327, 0.9378784894943237, 0.961079478263855, 0.9757083654403687, 0.9845442175865173], "l1-model.layers.0.mlp.down_proj.weight": [75074.1328125], "l2-model.layers.0.mlp.down_proj.weight": [12.579724311828613], "linf-model.layers.0.mlp.down_proj.weight": [0.004003104753792286], "request": {"prompt": "{} was bestowed with the", "subject": "European Union", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [1.301, 0.61, 0.215, 0.203, 0.155, 0.112, 0.091, 0.076, 0.066, 0.054, 0.043, 0.035, 0.028, 0.023, 0.02, 0.017, 0.015, 0.013, 0.012, 0.011], "prob_new": [0.4182564318180084, 0.6661671996116638, 0.8183064460754395, 0.8274612426757812, 0.8659624457359314, 0.9022175073623657, 0.9203025102615356, 0.9318017959594727, 0.9398277401924133, 0.950499951839447, 0.9600232243537903, 0.9673532843589783, 0.9732341170310974, 0.9776965379714966, 0.9810149073600769, 0.9835489392280579, 0.9855415225028992, 0.9871522784233093, 0.9884887933731079, 0.989625871181488], "prob_old": [0.8223134875297546, 0.3577438294887543, 0.5436702370643616, 0.53795325756073, 0.5378905534744263, 0.5435210466384888, 0.5537353754043579, 0.5656402111053467, 0.577311635017395, 0.5887374877929688, 0.6003453135490417, 0.6113882660865784, 0.6208293437957764, 0.6283302307128906, 0.6341658234596252, 0.6387202143669128, 0.6423129439353943, 0.6451969146728516, 0.6475661396980286, 0.6495631337165833], "prob_new_token": [0.615616500377655, 0.1273709535598755, 0.685163140296936, 0.6774885654449463, 0.6739184260368347, 0.6871919631958008, 0.7139303088188171, 0.746120035648346, 0.7791622281074524, 0.8092827796936035, 0.8360235095024109, 0.8597152233123779, 0.8801518678665161, 0.8971931338310242, 0.9110738039016724, 0.9222965240478516, 0.9314295649528503, 0.9389820694923401, 0.9453541040420532, 0.9508438110351562], "prob_old_token": [0.6156161427497864, 0.1273706555366516, 0.6851617693901062, 0.677489161491394, 0.6739183664321899, 0.6871923208236694, 0.7139310240745544, 0.7461197376251221, 0.7791613936424255, 0.809282124042511, 0.8360233306884766, 0.8597152233123779, 0.8801517486572266, 0.897192120552063, 0.9110739231109619, 0.9222965240478516, 0.9314298629760742, 0.9389818906784058, 0.9453541040420532, 0.9508435130119324], "l1-model.layers.0.mlp.down_proj.weight": [103827.875], "l2-model.layers.0.mlp.down_proj.weight": [17.854156494140625], "linf-model.layers.0.mlp.down_proj.weight": [0.009666981175541878], "request": {"prompt": "{} was bestowed with the", "subject": "Bertha von Suttner", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": 
{"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [7.671, 5.328, 2.602, 0.712, 0.212, 0.592, 0.66, 0.112, 0.164, 0.15, 0.077, 0.026, 0.009], "prob_new": [0.00046604787348769605, 0.0048527284525334835, 0.0741257593035698, 0.490858793258667, 0.8089433908462524, 0.5533302426338196, 0.5167577266693115, 0.8943849802017212, 0.8490684628486633, 0.860660195350647, 0.9258489012718201, 0.9743561148643494, 0.9907391667366028], "prob_old": [0.6538368463516235, 0.5443960428237915, 0.558595597743988, 0.5161739587783813, 0.4763588607311249, 0.45782023668289185, 0.4834093153476715, 0.5412648320198059, 0.5483610033988953, 0.547757625579834, 0.541421115398407, 0.53167325258255, 0.5229048132896423], "prob_new_token": [0.00046604787348769605, 0.0048527284525334835, 0.0741257593035698, 0.490858793258667, 0.8089433908462524, 0.5533302426338196, 0.5167577266693115, 0.8943849802017212, 0.8490684628486633, 0.860660195350647, 0.9258489012718201, 0.9743561148643494, 0.9907391667366028], "prob_old_token": [0.28406721353530884, 0.033444348722696304, 0.05399491265416145, 0.0030935185495764017, 0.0007258314290083945, 0.002539212815463543, 0.00020226654305588454, 0.00026827643159776926, 0.00046581507194787264, 0.000467738980660215, 0.00021493964595720172, 7.88856195867993e-05, 4.176872971584089e-05], "l1-model.layers.0.mlp.down_proj.weight": [75632.3125], "l2-model.layers.0.mlp.down_proj.weight": [13.514291763305664], "linf-model.layers.0.mlp.down_proj.weight": [0.005779922939836979], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Brazil"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [4.016, 3.316, 3.898, 2.937, 2.281, 1.811, 1.128, 0.436, 0.073, 0.016, 0.01, 0.011, 0.011, 0.009], "prob_new": [0.4206976592540741, 0.4476621150970459, 0.3125184178352356, 0.39902836084365845, 0.4963948726654053, 0.5092543959617615, 0.551067590713501, 0.7084483504295349, 0.9315810799598694, 0.9840834140777588, 0.9900420904159546, 0.9890583753585815, 0.9887882471084595, 0.9910844564437866], "prob_old": [0.6538368463516235, 0.534938633441925, 0.4892207086086273, 0.43255987763404846, 0.540639340877533, 0.5151306390762329, 0.464409202337265, 0.4015345573425293, 0.2139204740524292, 0.14062389731407166, 0.14186593890190125, 0.1680615395307541, 0.20007376372814178, 0.22885547578334808], "prob_new_token": [0.00038631714414805174, 0.001474978867918253, 0.0006585338269360363, 0.0035355936270207167, 0.010640589520335197, 0.026949577033519745, 0.1050618514418602, 0.4183633625507355, 0.8642938137054443, 0.9694059491157532, 0.981474757194519, 0.9793177843093872, 0.978341817855835, 0.9825792908668518], "prob_old_token": [0.28406721353530884, 0.03107646107673645, 0.005995950195938349, 0.017386553809046745, 0.016556918621063232, 0.012038834393024445, 0.0021237952169030905, 0.00017468405712861568, 1.964549119293224e-05, 8.196866474463604e-06, 8.061916560109239e-06, 8.768278348725289e-06, 8.131100912578404e-06, 6.348301212710794e-06], "l1-model.layers.0.mlp.down_proj.weight": [80819.390625], "l2-model.layers.0.mlp.down_proj.weight": [14.438631057739258], "linf-model.layers.0.mlp.down_proj.weight": [0.006100106984376907], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Denmark"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [8.706, 2.855, 8.244, 2.593, 1.816, 0.666, 0.241, 0.109, 0.061, 0.04, 0.029, 0.023, 0.019, 0.016, 0.015, 0.014, 0.013, 0.012, 0.011, 
0.009], "prob_new": [0.00016564593533985317, 0.05755472183227539, 0.00026274676201865077, 0.07477977126836777, 0.16267310082912445, 0.5135928392410278, 0.7859982848167419, 0.8964686989784241, 0.9408930540084839, 0.9606359601020813, 0.9714063405990601, 0.9776015877723694, 0.9814333915710449, 0.9836890697479248, 0.9849879145622253, 0.9858913421630859, 0.9867956638336182, 0.987899124622345, 0.9892174601554871, 0.9906525611877441], "prob_old": [0.6538368463516235, 0.5639055371284485, 0.5141472816467285, 0.5818639993667603, 0.5576652884483337, 0.5162535905838013, 0.47152382135391235, 0.44442805647850037, 0.4261014461517334, 0.4157775938510895, 0.41361409425735474, 0.4145452082157135, 0.41685986518859863, 0.4200551211833954, 0.4237957000732422, 0.42781344056129456, 0.4319528639316559, 0.43610772490501404, 0.44018861651420593, 0.44411754608154297], "prob_new_token": [0.00016564593533985317, 0.05755472183227539, 0.00026274676201865077, 0.07477977126836777, 0.16267310082912445, 0.5135928392410278, 0.7859982848167419, 0.8964686989784241, 0.9408930540084839, 0.9606359601020813, 0.9714063405990601, 0.9776015877723694, 0.9814333915710449, 0.9836890697479248, 0.9849879145622253, 0.9858913421630859, 0.9867956638336182, 0.987899124622345, 0.9892174601554871, 0.9906525611877441], "prob_old_token": [0.28406721353530884, 0.005466900300234556, 0.013103460893034935, 0.020425042137503624, 0.013215158134698868, 0.01492307148873806, 0.012136264704167843, 0.006436347495764494, 0.0029673485551029444, 0.0013748210621997714, 0.0007203862769529223, 0.0004180657269898802, 0.00026524500572122633, 0.00019607579451985657, 0.00016814604168757796, 0.00015759762027300894, 0.00015268870629370213, 0.00014790770364925265, 0.00014123806613497436, 0.00013262080028653145], "l1-model.layers.0.mlp.down_proj.weight": [94743.390625], "l2-model.layers.0.mlp.down_proj.weight": [16.6829833984375], "linf-model.layers.0.mlp.down_proj.weight": [0.007918521761894226], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Netherlands"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [6.243, 5.871, 1.834, 0.815, 0.604, 0.332, 0.192, 0.108, 0.071, 0.058, 0.048, 0.04, 0.034, 0.029, 0.025, 0.022, 0.02, 0.018, 0.015, 0.014], "prob_new": [0.0019431750988587737, 0.002818761859089136, 0.15970098972320557, 0.4428313374519348, 0.5464128255844116, 0.7173478007316589, 0.8254138231277466, 0.8975585699081421, 0.9311138391494751, 0.9435621500015259, 0.95281982421875, 0.9605571627616882, 0.9666586518287659, 0.9713259339332581, 0.9749581217765808, 0.977884829044342, 0.9803629517555237, 0.9825794696807861, 0.9846290349960327, 0.9865200519561768], "prob_old": [0.8033087253570557, 0.0030439216643571854, 0.0141951534897089, 0.015130273066461086, 0.009671390987932682, 0.0031770977657288313, 0.0013979977229610085, 0.0010168248554691672, 0.0011826069094240665, 0.0013393433764576912, 0.001208802219480276, 0.0009330887114629149, 0.0006879203720018268, 0.0005220489110797644, 0.0004168722953181714, 0.0003497181460261345, 0.0003054346307180822, 0.0002751782594714314, 0.0002539297565817833, 0.00023804382362868637], "prob_new_token": [0.0019431750988587737, 0.002818761859089136, 0.15970098972320557, 0.4428313374519348, 0.5464128255844116, 0.7173478007316589, 0.8254138231277466, 0.8975585699081421, 0.9311138391494751, 0.9435621500015259, 0.95281982421875, 0.9605571627616882, 0.9666586518287659, 0.9713259339332581, 0.9749581217765808, 0.977884829044342, 0.9803629517555237, 
0.9825794696807861, 0.9846290349960327, 0.9865200519561768], "prob_old_token": [0.8033087253570557, 0.0030439216643571854, 0.0141951534897089, 0.015130273066461086, 0.009671390987932682, 0.0031770977657288313, 0.0013979977229610085, 0.0010168248554691672, 0.0011826069094240665, 0.0013393433764576912, 0.001208802219480276, 0.0009330887114629149, 0.0006879203720018268, 0.0005220489110797644, 0.0004168722953181714, 0.0003497181460261345, 0.0003054346307180822, 0.0002751782594714314, 0.0002539297565817833, 0.00023804382362868637], "l1-model.layers.0.mlp.down_proj.weight": [97678.96875], "l2-model.layers.0.mlp.down_proj.weight": [17.15069007873535], "linf-model.layers.0.mlp.down_proj.weight": [0.00866628997027874], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Argentina"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [9.596, 5.695, 2.499, 0.145, 0.045, 0.023, 0.019, 0.017, 0.019, 0.021, 0.018, 0.014, 0.009], "prob_new": [6.798121466999874e-05, 0.0033629098907113075, 0.08213953673839569, 0.8645937442779541, 0.9556283950805664, 0.9773313403129578, 0.9812940955162048, 0.9829286336898804, 0.9815138578414917, 0.979661762714386, 0.9817054271697998, 0.9864991903305054, 0.991047739982605], "prob_old": [0.8033087253570557, 0.0033666512463241816, 0.016353003680706024, 0.0004615234211087227, 6.031866723787971e-05, 1.4159403690428007e-05, 6.1822470343031455e-06, 5.4965285016805865e-06, 8.014083505258895e-06, 1.2164264262537472e-05, 1.3561162631958723e-05, 1.043336305883713e-05, 6.34009120403789e-06], "prob_new_token": [6.798121466999874e-05, 0.0033629098907113075, 0.08213953673839569, 0.8645937442779541, 0.9556283950805664, 0.9773313403129578, 0.9812940955162048, 0.9829286336898804, 0.9815138578414917, 0.979661762714386, 0.9817054271697998, 0.9864991903305054, 0.991047739982605], "prob_old_token": [0.8033087253570557, 0.0033666512463241816, 0.016353003680706024, 0.0004615234211087227, 6.031866723787971e-05, 1.4159403690428007e-05, 6.1822470343031455e-06, 5.4965285016805865e-06, 8.014083505258895e-06, 1.2164264262537472e-05, 1.3561162631958723e-05, 1.043336305883713e-05, 6.34009120403789e-06], "l1-model.layers.0.mlp.down_proj.weight": [85815.515625], "l2-model.layers.0.mlp.down_proj.weight": [14.762028694152832], "linf-model.layers.0.mlp.down_proj.weight": [0.0058963908813893795], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Egypt"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [12.105, 6.263, 5.507, 0.824, 0.15, 0.076, 0.049, 0.036, 0.027, 0.021, 0.017, 0.013, 0.011, 0.009], "prob_new": [5.530110229301499e-06, 0.0019052207935601473, 0.0040567005053162575, 0.43848884105682373, 0.8609477877616882, 0.9269369840621948, 0.9519979357719421, 0.9649163484573364, 0.9729169607162476, 0.9787655472755432, 0.9832858443260193, 0.9867053627967834, 0.989151120185852, 0.9907758235931396], "prob_old": [0.8033087253570557, 0.009895997121930122, 0.08026476949453354, 0.0013316114200279117, 0.0001965367264347151, 9.130901889875531e-05, 5.6889988627517596e-05, 3.8353715353878215e-05, 2.7015334126190282e-05, 1.9135432012262754e-05, 1.3299410966283176e-05, 9.120315553445835e-06, 6.363114607665921e-06, 4.665319011110114e-06], "prob_new_token": [5.530110229301499e-06, 0.0019052207935601473, 0.0040567005053162575, 0.43848884105682373, 0.8609477877616882, 0.9269369840621948, 0.9519979357719421, 0.9649163484573364, 0.9729169607162476, 0.9787655472755432, 0.9832858443260193, 
0.9867053627967834, 0.989151120185852, 0.9907758235931396], "prob_old_token": [0.8033087253570557, 0.009895997121930122, 0.08026476949453354, 0.0013316114200279117, 0.0001965367264347151, 9.130901889875531e-05, 5.6889988627517596e-05, 3.8353715353878215e-05, 2.7015334126190282e-05, 1.9135432012262754e-05, 1.3299410966283176e-05, 9.120315553445835e-06, 6.363114607665921e-06, 4.665319011110114e-06], "l1-model.layers.0.mlp.down_proj.weight": [82727.2421875], "l2-model.layers.0.mlp.down_proj.weight": [14.656777381896973], "linf-model.layers.0.mlp.down_proj.weight": [0.006112793460488319], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Ukraine"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [13.192, 5.78, 5.83, 4.573, 3.184, 1.788, 1.25, 0.229, 0.049, 0.015, 0.007], "prob_new": [1.8659325178305153e-06, 0.0030875736847519875, 0.0029391380958259106, 0.010329159907996655, 0.041413817554712296, 0.16737313568592072, 0.28652215003967285, 0.7954330444335938, 0.9524773955345154, 0.9848753809928894, 0.9925605654716492], "prob_old": [0.9169411659240723, 0.7557369470596313, 0.649990439414978, 0.729244589805603, 0.7381491661071777, 0.7088855504989624, 0.7360106706619263, 0.7307870984077454, 0.7178617119789124, 0.7088329195976257, 0.709182858467102], "prob_new_token": [1.8659325178305153e-06, 0.0030875736847519875, 0.0029391380958259106, 0.010329159907996655, 0.041413817554712296, 0.16737313568592072, 0.28652215003967285, 0.7954330444335938, 0.9524773955345154, 0.9848753809928894, 0.9925605654716492], "prob_old_token": [0.7120962738990784, 0.07169990986585617, 0.005980373825877905, 0.008562867529690266, 0.0035503352992236614, 0.0004579200176522136, 0.0006775677902624011, 6.30161157459952e-05, 9.435120773559902e-06, 4.014946625829907e-06, 2.7964629225607496e-06], "l1-model.layers.0.mlp.down_proj.weight": [76772.6484375], "l2-model.layers.0.mlp.down_proj.weight": [13.408232688903809], "linf-model.layers.0.mlp.down_proj.weight": [0.004904342815279961], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Ukraine"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [6.385, 3.883, 2.343, 2.691, 0.894, 0.226, 0.123, 0.069, 0.038, 0.02, 0.011, 0.006], "prob_new": [0.327322781085968, 0.3463286757469177, 0.44699543714523315, 0.3613210618495941, 0.6655194163322449, 0.8315644264221191, 0.8949726819992065, 0.9366763830184937, 0.9634338617324829, 0.9800722002983093, 0.9891387820243835, 0.9936721324920654], "prob_old": [0.9169411659240723, 0.7667919397354126, 0.6610852479934692, 0.639916181564331, 0.657579779624939, 0.5573149919509888, 0.5539506077766418, 0.5416673421859741, 0.5276530981063843, 0.5159895420074463, 0.5074259042739868, 0.5007863640785217], "prob_new_token": [2.312546257599024e-06, 0.0001685078750597313, 0.002566341543570161, 0.0031001251190900803, 0.07422053068876266, 0.5233231782913208, 0.7090379595756531, 0.8284530639648438, 0.9044821262359619, 0.9503205418586731, 0.9738801717758179, 0.9849442839622498], "prob_old_token": [0.7120962738990784, 0.13324613869190216, 0.004333786200731993, 0.001147692441008985, 0.0013054459122940898, 9.509271330898628e-05, 2.9450735382852145e-05, 1.1500815162435174e-05, 4.302080014895182e-06, 1.3277214065965381e-06, 3.887767263677233e-07, 1.3554773659052444e-07], "l1-model.layers.0.mlp.down_proj.weight": [75276.71875], "l2-model.layers.0.mlp.down_proj.weight": [13.556035041809082], "linf-model.layers.0.mlp.down_proj.weight": 
[0.00536222942173481], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Beijing"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [6.077, 2.797, 3.598, 2.331, 1.524, 0.84, 0.223, 0.029, 0.012, 0.009], "prob_new": [0.39425405859947205, 0.4941679537296295, 0.49384260177612305, 0.4966866075992584, 0.5214560031890869, 0.5924389362335205, 0.8200763463973999, 0.9714589715003967, 0.9877386093139648, 0.9906343221664429], "prob_old": [0.9169411659240723, 0.7657207250595093, 0.6310659646987915, 0.7161774039268494, 0.7333629727363586, 0.7357054948806763, 0.7278357744216919, 0.7158482074737549, 0.709342360496521, 0.7156983017921448], "prob_new_token": [6.678090812783921e-06, 0.003779569873586297, 0.0007593011250719428, 0.009594581089913845, 0.047632500529289246, 0.18654288351535797, 0.6409110426902771, 0.9431648850440979, 0.9756506681442261, 0.9814332127571106], "prob_old_token": [0.7120962738990784, 0.1308511644601822, 0.0016007354715839028, 0.006112419068813324, 0.005586183164268732, 0.005274689756333828, 0.0006995346629992127, 6.448601925512776e-05, 2.749850318650715e-05, 2.1866319002583623e-05], "l1-model.layers.0.mlp.down_proj.weight": [69404.390625], "l2-model.layers.0.mlp.down_proj.weight": [12.397432327270508], "linf-model.layers.0.mlp.down_proj.weight": [0.004427941516041756], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Thailand"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [10.462, 2.81, 2.597, 2.056, 0.909, 0.359, 0.145, 0.068, 0.048, 0.043, 0.039, 0.033, 0.025, 0.019, 0.014, 0.01, 0.008], "prob_new": [2.8606737032532692e-05, 0.06023421138525009, 0.07447214424610138, 0.1280147284269333, 0.4029862582683563, 0.6983028650283813, 0.8651178479194641, 0.9338935613632202, 0.9529662728309631, 0.9582812786102295, 0.9620620012283325, 0.9678840637207031, 0.9749262928962708, 0.9813991189002991, 0.9863606691360474, 0.9897811412811279, 0.992023229598999], "prob_old": [0.8442697525024414, 0.6606799960136414, 0.5180138349533081, 0.5112207531929016, 0.5145472288131714, 0.513626217842102, 0.513218104839325, 0.5148884057998657, 0.5148258209228516, 0.5121339559555054, 0.5091397166252136, 0.5070174932479858, 0.5056661367416382, 0.5047271251678467, 0.503979504108429, 0.502814531326294, 0.5018320083618164], "prob_new_token": [2.8606737032532692e-05, 0.06023421138525009, 0.07447214424610138, 0.1280147284269333, 0.4029862582683563, 0.6983028650283813, 0.8651178479194641, 0.9338935613632202, 0.9529662728309631, 0.9582812786102295, 0.9620620012283325, 0.9678840637207031, 0.9749262928962708, 0.9813991189002991, 0.9863606691360474, 0.9897811412811279, 0.992023229598999], "prob_old_token": [0.412433922290802, 0.09168730676174164, 0.14764074981212616, 0.064778633415699, 0.012104478664696217, 0.0028355983085930347, 0.0007430631667375565, 0.00033200494362972677, 0.00026524451095610857, 0.0002590539224911481, 0.00024909479543566704, 0.00022365240147337317, 0.00018901292060036212, 0.00015419091505464166, 0.00012479940778575838, 0.00010238977847620845, 8.615548722445965e-05], "l1-model.layers.0.mlp.down_proj.weight": [93766.3046875], "l2-model.layers.0.mlp.down_proj.weight": [16.649425506591797], "linf-model.layers.0.mlp.down_proj.weight": [0.0073789265006780624], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "India"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [13.771, 8.111, 5.644, 4.653, 5.31, 4.724, 4.395, 4.103, 
3.544, 1.799, 1.528, 1.144, 0.563, 0.253, 0.115, 0.055, 0.03, 0.018, 0.013, 0.009], "prob_new": [1.0455619303684216e-06, 0.0003003141318913549, 0.003539169440045953, 0.00952956173568964, 0.0049405768513679504, 0.008880062960088253, 0.012342624366283417, 0.016530461609363556, 0.028900885954499245, 0.16542427241802216, 0.21706531941890717, 0.31867387890815735, 0.5693557858467102, 0.7765390872955322, 0.891567587852478, 0.9465319514274597, 0.9708796143531799, 0.9818892478942871, 0.9874062538146973, 0.9906346201896667], "prob_old": [0.8442697525024414, 0.6955658793449402, 0.5710760951042175, 0.5737992525100708, 0.546903669834137, 0.5842533111572266, 0.5813018083572388, 0.577216625213623, 0.5718215703964233, 0.5590096116065979, 0.5352898240089417, 0.5286936163902283, 0.5329351425170898, 0.536838173866272, 0.5379436016082764, 0.537071168422699, 0.5350395441055298, 0.532544732093811, 0.5300832986831665, 0.5277894735336304], "prob_new_token": [1.0455619303684216e-06, 0.0003003141318913549, 0.003539169440045953, 0.00952956173568964, 0.0049405768513679504, 0.008880062960088253, 0.012342624366283417, 0.016530461609363556, 0.028900885954499245, 0.16542427241802216, 0.21706531941890717, 0.31867387890815735, 0.5693557858467102, 0.7765390872955322, 0.891567587852478, 0.9465319514274597, 0.9708796143531799, 0.9818892478942871, 0.9874062538146973, 0.9906346201896667], "prob_old_token": [0.412433922290802, 0.08173494040966034, 0.06723844259977341, 0.019947506487369537, 0.06259612739086151, 0.0072904895059764385, 0.02852787636220455, 0.03827551379799843, 0.03760950267314911, 0.016988426446914673, 0.004058461636304855, 0.002605351386591792, 0.0020946399308741093, 0.0014010127633810043, 0.0008220226736739278, 0.0004614188801497221, 0.0002708187384996563, 0.0001709728967398405, 0.00011358487972756848, 7.667418685741723e-05], "l1-model.layers.0.mlp.down_proj.weight": [95315.9140625], "l2-model.layers.0.mlp.down_proj.weight": [17.290781021118164], "linf-model.layers.0.mlp.down_proj.weight": [0.00873686745762825], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "Mexico"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [0.37, 0.114, 0.032, 0.005], "prob_new": [0.7990957498550415, 0.9069623947143555, 0.9693865776062012, 0.9953961372375488], "prob_old": [0.8442697525024414, 0.6963174343109131, 0.6834073066711426, 0.6651890277862549], "prob_new_token": [0.23768770694732666, 0.6453284621238708, 0.888481616973877, 0.9869083166122437], "prob_old_token": [0.412433922290802, 0.11452267318964005, 0.03996996954083443, 0.00651793647557497], "l1-model.layers.0.mlp.down_proj.weight": [43647.39453125], "l2-model.layers.0.mlp.down_proj.weight": [6.940002918243408], "linf-model.layers.0.mlp.down_proj.weight": [0.001502458704635501], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "Saudi Arabia"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [1.808, 1.352, 0.909, 0.558, 0.248, 0.072, 0.051, 0.028, 0.014, 0.008], "prob_new": [0.38338664174079895, 0.46373164653778076, 0.5554848909378052, 0.6669179797172546, 0.8078306913375854, 0.9336157441139221, 0.9516760110855103, 0.9727855920791626, 0.9861514568328857, 0.9917146563529968], "prob_old": [0.6396514177322388, 0.5226559638977051, 0.37596529722213745, 0.385309100151062, 0.4401775598526001, 0.4156433045864105, 0.3852464258670807, 0.39261317253112793, 0.41181397438049316, 0.42296913266181946], "prob_new_token": [0.08499715477228165, 
0.16084882616996765, 0.24893158674240112, 0.3180830478668213, 0.41086459159851074, 0.8132070899009705, 0.8970407843589783, 0.9540428519248962, 0.9805943965911865, 0.9873127937316895], "prob_old_token": [0.7084969282150269, 0.37268391251564026, 0.10606065392494202, 0.08359287679195404, 0.08133094757795334, 0.014349283650517464, 0.005971434526145458, 0.0009109150269068778, 0.0001146342619904317, 3.454453690210357e-05], "l1-model.layers.0.mlp.down_proj.weight": [80256.109375], "l2-model.layers.0.mlp.down_proj.weight": [13.726360321044922], "linf-model.layers.0.mlp.down_proj.weight": [0.004503801465034485], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of California, Los Angeles"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [2.934, 1.401, 0.973, 0.692, 0.266, 0.143, 0.089, 0.05, 0.031, 0.024, 0.02, 0.017, 0.015, 0.012, 0.01, 0.008], "prob_new": [0.36576762795448303, 0.5118157863616943, 0.5652831196784973, 0.6436304450035095, 0.8015919923782349, 0.8797355890274048, 0.9195295572280884, 0.9531545042991638, 0.9697675108909607, 0.9767760634422302, 0.9804056286811829, 0.9830913543701172, 0.9856056571006775, 0.987980306148529, 0.9900729060173035, 0.9917872548103333], "prob_old": [0.6396514177322388, 0.513378918170929, 0.3060189485549927, 0.3771155774593353, 0.3373468816280365, 0.3530581295490265, 0.3616723418235779, 0.36268150806427, 0.3622516393661499, 0.35943731665611267, 0.3538132309913635, 0.3466836214065552, 0.33954930305480957, 0.3334919214248657, 0.3291402757167816, 0.32671111822128296], "prob_new_token": [0.0849967896938324, 0.14071260392665863, 0.24030320346355438, 0.3565409481525421, 0.42574676871299744, 0.617384672164917, 0.760415256023407, 0.855323076248169, 0.9036615490913391, 0.9262434244155884, 0.9391794800758362, 0.9489344954490662, 0.9576266407966614, 0.9655252695083618, 0.9723666310310364, 0.9779220819473267], "prob_old_token": [0.7084969282150269, 0.42455121874809265, 0.07892317324876785, 0.06400628387928009, 0.026277752593159676, 0.01773660071194172, 0.010067056864500046, 0.006196446251124144, 0.003888400038704276, 0.002831696067005396, 0.0023496716748923063, 0.0019677611999213696, 0.0015550937969237566, 0.0011469675227999687, 0.0007986384443938732, 0.0005361753283068538], "l1-model.layers.0.mlp.down_proj.weight": [94988.5390625], "l2-model.layers.0.mlp.down_proj.weight": [16.271303176879883], "linf-model.layers.0.mlp.down_proj.weight": [0.0069666663184762], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of Bristol"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [2.593, 0.798, 0.546, 0.323, 0.155, 0.069, 0.031, 0.018, 0.014, 0.013, 0.01, 0.009], "prob_new": [0.27795514464378357, 0.5884607434272766, 0.6555984020233154, 0.7780739068984985, 0.8731604814529419, 0.9365185499191284, 0.9702216982841492, 0.981951117515564, 0.9861816167831421, 0.9870332479476929, 0.9896504878997803, 0.9910883903503418], "prob_old": [0.6396514177322388, 0.5314573049545288, 0.3164699673652649, 0.3497115969657898, 0.37545573711395264, 0.380197674036026, 0.39456355571746826, 0.40923595428466797, 0.42032667994499207, 0.42758411169052124, 0.43315160274505615, 0.4375482499599457], "prob_new_token": [0.0849967896938324, 0.1306847333908081, 0.23770712316036224, 0.3633905053138733, 0.6073702573776245, 0.8087867498397827, 0.9148017168045044, 0.9510015249252319, 0.9662760496139526, 
0.9734910130500793, 0.9770894646644592, 0.9797657132148743], "prob_old_token": [0.7084969282150269, 0.4461738169193268, 0.053924862295389175, 0.008876562118530273, 0.001771330600604415, 0.00030838523525744677, 7.805197674315423e-05, 2.642334766278509e-05, 1.0823176125995815e-05, 6.351044248731341e-06, 5.247905846772483e-06, 5.094100288260961e-06], "l1-model.layers.0.mlp.down_proj.weight": [92814.1328125], "l2-model.layers.0.mlp.down_proj.weight": [15.042533874511719], "linf-model.layers.0.mlp.down_proj.weight": [0.005367079749703407], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of Cambridge"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [1.218, 0.494, 0.245, 0.14, 0.077, 0.043, 0.024, 0.015, 0.01], "prob_new": [0.5217112302780151, 0.6551210284233093, 0.8109049201011658, 0.8803776502609253, 0.9291784763336182, 0.9588717222213745, 0.976631224155426, 0.9855694770812988, 0.9901854991912842], "prob_old": [0.6436144113540649, 0.48974257707595825, 0.5822219252586365, 0.6453394889831543, 0.6909422874450684, 0.7160100340843201, 0.7303602695465088, 0.7365981340408325, 0.7396562695503235], "prob_new_token": [0.5371917486190796, 0.3510189950466156, 0.48162025213241577, 0.674639105796814, 0.8235102295875549, 0.9123397469520569, 0.9569651484489441, 0.9737658500671387, 0.9811357259750366], "prob_old_token": [0.53719162940979, 0.35102036595344543, 0.4816211760044098, 0.6746382713317871, 0.8235102295875549, 0.9123403429985046, 0.9569651484489441, 0.9737659096717834, 0.981135904788971], "l1-model.layers.0.mlp.down_proj.weight": [77700.875], "l2-model.layers.0.mlp.down_proj.weight": [12.783679008483887], "linf-model.layers.0.mlp.down_proj.weight": [0.004016505554318428], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of California, Berkeley"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [2.252, 0.959, 0.356, 0.077, 0.025, 0.018, 0.015, 0.013, 0.011, 0.009], "prob_new": [0.5689975023269653, 0.5181556940078735, 0.7488340139389038, 0.9314553737640381, 0.9761158227920532, 0.9827986359596252, 0.9850107431411743, 0.9873710870742798, 0.9894547462463379, 0.9910977482795715], "prob_old": [0.6436144113540649, 0.4960123598575592, 0.5672166347503662, 0.6830742955207825, 0.7270981073379517, 0.7334886193275452, 0.7354874610900879, 0.7377397418022156, 0.7397844791412354, 0.7414233684539795], "prob_new_token": [0.53719162940979, 0.33544808626174927, 0.3672330379486084, 0.7612610459327698, 0.9267398715019226, 0.9526928663253784, 0.960725724697113, 0.9674662351608276, 0.9731329083442688, 0.9775415658950806], "prob_old_token": [0.53719162940979, 0.33544808626174927, 0.3672330379486084, 0.7612610459327698, 0.9267398715019226, 0.9526928663253784, 0.960725724697113, 0.9674662351608276, 0.9731329083442688, 0.9775415658950806], "l1-model.layers.0.mlp.down_proj.weight": [79041.4375], "l2-model.layers.0.mlp.down_proj.weight": [13.251994132995605], "linf-model.layers.0.mlp.down_proj.weight": [0.004453022498637438], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of Texas"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [2.435, 1.564, 1.061, 0.473, 0.14, 0.071, 0.043, 0.029, 0.021, 0.016, 0.012, 0.009], "prob_new": [0.6940945386886597, 0.6088724136352539, 
0.6598286628723145, 0.7496716380119324, 0.8782169222831726, 0.9361028671264648, 0.9594048261642456, 0.9720423221588135, 0.9789318442344666, 0.9845939874649048, 0.9883686304092407, 0.990837574005127], "prob_old": [0.6436144113540649, 0.430645227432251, 0.4964040219783783, 0.5944138765335083, 0.6217896342277527, 0.6598658561706543, 0.6936272382736206, 0.7154009938240051, 0.7261415719985962, 0.731307864189148, 0.7347630262374878, 0.7375629544258118], "prob_new_token": [0.5371917486190796, 0.3635691702365875, 0.5909568071365356, 0.7262601852416992, 0.6859090924263, 0.7449715733528137, 0.8366544246673584, 0.9081435799598694, 0.9451831579208374, 0.9612125158309937, 0.9692202806472778, 0.9745924472808838], "prob_old_token": [0.53719162940979, 0.3635694682598114, 0.59095698595047, 0.7262604832649231, 0.6859083771705627, 0.7449713945388794, 0.836654543876648, 0.9081437587738037, 0.9451829791069031, 0.9612125158309937, 0.9692198038101196, 0.9745924472808838], "l1-model.layers.0.mlp.down_proj.weight": [80704.75], "l2-model.layers.0.mlp.down_proj.weight": [14.21716022491455], "linf-model.layers.0.mlp.down_proj.weight": [0.005343440920114517], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of Bucharest"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [4.539, 2.401, 1.822, 0.819, 0.426, 0.133, 0.079, 0.052, 0.038, 0.03, 0.025, 0.019, 0.015, 0.011, 0.009], "prob_new": [0.48854687809944153, 0.4882625341415405, 0.5025951862335205, 0.5909417271614075, 0.7107575535774231, 0.8828736543655396, 0.9266529083251953, 0.9505285024642944, 0.9631201028823853, 0.9704082012176514, 0.9758303165435791, 0.9808511734008789, 0.9853079319000244, 0.9888103008270264, 0.9913662075996399], "prob_old": [0.9215955138206482, 0.3760823607444763, 0.18414027988910675, 0.0010674612130969763, 0.003668935038149357, 0.0009043483296409249, 0.0005644691991619766, 0.0006093279807828367, 0.0008039280073717237, 0.0011099639814347029, 0.0012452874798327684, 0.000977880205027759, 0.0005582396406680346, 0.00026788885588757694, 0.00012549532402772456], "prob_new_token": [0.00011676352005451918, 0.008478516712784767, 0.026720205321907997, 0.1973583996295929, 0.42995113134384155, 0.7711020708084106, 0.8582362532615662, 0.9055091738700867, 0.9300341606140137, 0.9440696239471436, 0.9544905424118042, 0.96413254737854, 0.9726589918136597, 0.9793172478675842, 0.9841375946998596], "prob_old_token": [0.9215955138206482, 0.3760823607444763, 0.18414027988910675, 0.0010674612130969763, 0.003668935038149357, 0.0009043483296409249, 0.0005644691991619766, 0.0006093279807828367, 0.0008039280073717237, 0.0011099639814347029, 0.0012452874798327684, 0.000977880205027759, 0.0005582396406680346, 0.00026788885588757694, 0.00012549532402772456], "l1-model.layers.0.mlp.down_proj.weight": [85767.8671875], "l2-model.layers.0.mlp.down_proj.weight": [15.303802490234375], "linf-model.layers.0.mlp.down_proj.weight": [0.00654150266200304], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Arabic"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [4.331, 3.382, 1.902, 0.847, 0.182, 0.091, 0.038, 0.024, 0.017, 0.013, 0.01, 0.008], "prob_new": [0.6566707491874695, 0.6038438677787781, 0.5797245502471924, 0.5902466773986816, 0.8432141542434692, 0.9180054068565369, 0.9632415771484375, 0.976832389831543, 0.9835692644119263, 0.9872917532920837, 0.9897936582565308, 0.9916976690292358], 
"prob_old": [0.9215955138206482, 0.1398836225271225, 0.12896433472633362, 3.836498217424378e-05, 7.352898683166131e-05, 2.4189153918996453e-05, 6.666300123470137e-06, 2.733027713475167e-06, 1.5048038903842098e-06, 9.791330057851155e-07, 6.853321679045621e-07, 5.072418503004883e-07], "prob_new_token": [2.3428087843058165e-06, 4.8109592171385884e-05, 0.004495538771152496, 0.12008503079414368, 0.6845184564590454, 0.7839780449867249, 0.9053784012794495, 0.9424979090690613, 0.9616679549217224, 0.9722394943237305, 0.9786696434020996, 0.9827961921691895], "prob_old_token": [0.9215955138206482, 0.1398836225271225, 0.12896433472633362, 3.836498217424378e-05, 7.352898683166131e-05, 2.4189153918996453e-05, 6.666300123470137e-06, 2.733027713475167e-06, 1.5048038903842098e-06, 9.791330057851155e-07, 6.853321679045621e-07, 5.072418503004883e-07], "l1-model.layers.0.mlp.down_proj.weight": [76170.9296875], "l2-model.layers.0.mlp.down_proj.weight": [13.344606399536133], "linf-model.layers.0.mlp.down_proj.weight": [0.005437653511762619], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Kurdish"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [4.866, 3.295, 2.794, 2.234, 1.739, 1.049, 0.58, 0.299, 0.195, 0.116, 0.069, 0.038, 0.022, 0.014, 0.009], "prob_new": [0.29197633266448975, 0.39295995235443115, 0.43260714411735535, 0.527341902256012, 0.4442906379699707, 0.6471753716468811, 0.7161655426025391, 0.7989392280578613, 0.8497627973556519, 0.9003199338912964, 0.9370638728141785, 0.9634324908256531, 0.9787175059318542, 0.9865791201591492, 0.990786075592041], "prob_old": [0.9215955138206482, 0.14341209828853607, 0.6035942435264587, 0.00022596032067667693, 3.64375882782042e-05, 4.046623143949546e-05, 3.986615047324449e-05, 3.465108238742687e-05, 3.7917037843726575e-05, 3.297423245385289e-05, 2.663046143425163e-05, 2.0200157450744882e-05, 1.5782103218953125e-05, 1.3198720807849895e-05, 1.156346934294561e-05], "prob_new_token": [3.0160324968164787e-05, 0.00024545358610339463, 0.000731505686417222, 0.002051708986982703, 0.01597503200173378, 0.04798729345202446, 0.18123212456703186, 0.41562435030937195, 0.568980872631073, 0.7188704013824463, 0.825028657913208, 0.9005976915359497, 0.9436756372451782, 0.9653253555297852, 0.9767531752586365], "prob_old_token": [0.9215955138206482, 0.14341209828853607, 0.6035942435264587, 0.00022596032067667693, 3.64375882782042e-05, 4.046623143949546e-05, 3.986615047324449e-05, 3.465108238742687e-05, 3.7917037843726575e-05, 3.297423245385289e-05, 2.663046143425163e-05, 2.0200157450744882e-05, 1.5782103218953125e-05, 1.3198720807849895e-05, 1.156346934294561e-05], "l1-model.layers.0.mlp.down_proj.weight": [87685.5703125], "l2-model.layers.0.mlp.down_proj.weight": [15.672386169433594], "linf-model.layers.0.mlp.down_proj.weight": [0.006737635470926762], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Uzbek"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [3.176, 1.512, 0.328, 0.606, 0.225, 0.881, 0.123, 0.269, 0.081, 0.115, 0.072, 0.038, 0.022, 0.014, 0.013, 0.011, 0.008], "prob_new": [0.4383750557899475, 0.6558736562728882, 0.774137020111084, 0.7198542356491089, 0.8351025581359863, 0.6136442422866821, 0.8932303190231323, 0.8112595081329346, 0.9276779890060425, 0.901537299156189, 0.9342190623283386, 0.9637269377708435, 0.9789167642593384, 0.9858412742614746, 0.9874014854431152, 0.989050567150116, 0.99251788854599], "prob_old": 
[0.9290962219238281, 0.004029113799333572, 0.006541783921420574, 0.07441668212413788, 0.0034678156953305006, 0.0006337751401588321, 0.0010443816427141428, 0.0009738797671161592, 0.0019157700007781386, 0.0022087767720222473, 0.0018405228620395064, 0.0013578205835074186, 0.0009004721650853753, 0.00048665647045709193, 0.0003433373640291393, 0.00026883860118687153, 0.0001658563269302249], "prob_new_token": [0.00023026124108582735, 0.011197888292372227, 0.4085839092731476, 0.1627558171749115, 0.5147697329521179, 0.09519840776920319, 0.7212028503417969, 0.4573553502559662, 0.7911016345024109, 0.7159217000007629, 0.815273106098175, 0.9030636548995972, 0.9451220035552979, 0.962757408618927, 0.965609073638916, 0.9694811701774597, 0.9791040420532227], "prob_old_token": [0.9290962219238281, 0.004029113799333572, 0.006541783921420574, 0.07441668212413788, 0.0034678156953305006, 0.0006337751401588321, 0.0010443816427141428, 0.0009738797671161592, 0.0019157700007781386, 0.0022087767720222473, 0.0018405228620395064, 0.0013578205835074186, 0.0009004721650853753, 0.00048665647045709193, 0.0003433373640291393, 0.00026883860118687153, 0.0001658563269302249], "l1-model.layers.0.mlp.down_proj.weight": [85702.6015625], "l2-model.layers.0.mlp.down_proj.weight": [15.377883911132812], "linf-model.layers.0.mlp.down_proj.weight": [0.007405568845570087], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Latvian"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [6.123, 1.996, 0.15, 0.186, 0.006], "prob_new": [0.4781739413738251, 0.5022590160369873, 0.8700219392776489, 0.8432923555374146, 0.9938451051712036], "prob_old": [0.9290962219238281, 0.033098138868808746, 0.0014472740003839135, 0.00018275785259902477, 4.6310109610203654e-05], "prob_new_token": [5.023955509386724e-06, 0.01873691938817501, 0.7405127882957458, 0.6940807104110718, 0.9878320693969727], "prob_old_token": [0.9290962219238281, 0.033098138868808746, 0.0014472740003839135, 0.00018275785259902477, 4.6310109610203654e-05], "l1-model.layers.0.mlp.down_proj.weight": [43060.48828125], "l2-model.layers.0.mlp.down_proj.weight": [7.678102970123291], "linf-model.layers.0.mlp.down_proj.weight": [0.0020058047957718372], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Bengali"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [5.685, 2.955, 2.522, 0.062, 0.019, 0.01], "prob_new": [0.49420082569122314, 0.4995775520801544, 0.5014864206314087, 0.9414371252059937, 0.980952262878418, 0.9903092384338379], "prob_old": [0.9290962219238281, 0.13183067739009857, 0.05487290769815445, 0.014405135065317154, 0.0021092884708195925, 0.0008895091013982892], "prob_new_token": [1.1662390534183942e-05, 0.0027241595089435577, 0.0064641861245036125, 0.8833514451980591, 0.962330162525177, 0.9809879064559937], "prob_old_token": [0.9290962219238281, 0.13183067739009857, 0.05487290769815445, 0.014405135065317154, 0.0021092884708195925, 0.0008895091013982892], "l1-model.layers.0.mlp.down_proj.weight": [47045.8359375], "l2-model.layers.0.mlp.down_proj.weight": [8.380229949951172], "linf-model.layers.0.mlp.down_proj.weight": [0.002489505335688591], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Hebrew"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [8.288, 4.775, 3.712, 2.905, 1.901, 0.82, 0.323, 0.161, 0.053, 0.018, 0.016, 0.016, 0.012, 0.007], "prob_new": 
[0.13263864815235138, 0.21140818297863007, 0.2879667580127716, 0.4062490165233612, 0.6276133060455322, 0.6803336143493652, 0.7855533957481384, 0.8685919046401978, 0.9499646425247192, 0.9826099276542664, 0.9845724105834961, 0.9845199584960938, 0.9884576797485352, 0.9927190542221069], "prob_old": [0.9271687269210815, 0.3846800923347473, 0.018777891993522644, 0.0517561249434948, 0.015817228704690933, 0.007023558020591736, 0.001554272137582302, 0.0004789666854776442, 0.0004142396501265466, 0.0007191361510194838, 0.0010605204151943326, 0.0009307120926678181, 0.0007588112493976951, 0.0005992219084873796], "prob_new_token": [1.7212462566362774e-08, 3.143795765936375e-05, 0.00019864986825268716, 0.000615794793702662, 0.003784170374274254, 0.0898408368229866, 0.39498239755630493, 0.6370644569396973, 0.8717257380485535, 0.9651657938957214, 0.9682465195655823, 0.9656590819358826, 0.9747125506401062, 0.9851487278938293], "prob_old_token": [0.8750066161155701, 0.05799185857176781, 0.004220812581479549, 0.019286416471004486, 0.007480149157345295, 0.0010042102076113224, 0.0009939505252987146, 0.00020418585336301476, 1.656172753428109e-05, 8.196522685466334e-06, 1.4817729606875218e-05, 2.1190993720665574e-05, 1.3236056474852376e-05, 5.166130449651973e-06], "l1-model.layers.0.mlp.down_proj.weight": [85416.0234375], "l2-model.layers.0.mlp.down_proj.weight": [15.132620811462402], "linf-model.layers.0.mlp.down_proj.weight": [0.0061164116486907005], "request": {"prompt": "{} is named in honor of", "subject": "St. Louis", "target_new": {"str": "Monty Python"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [4.041, 1.794, 1.238, 0.478, 0.159, 0.093, 0.049, 0.028, 0.019, 0.014, 0.011, 0.008], "prob_new": [0.43936824798583984, 0.5208365321159363, 0.593696117401123, 0.7062994837760925, 0.8660375475883484, 0.9142780303955078, 0.9528949856758118, 0.9723324775695801, 0.9814201593399048, 0.9860994219779968, 0.9892865419387817, 0.9919143915176392], "prob_old": [0.9271687269210815, 0.4697939455509186, 0.2553921341896057, 0.0615486204624176, 0.18817509710788727, 0.052610743790864944, 0.015448028221726418, 0.00705334497615695, 0.0046849362552165985, 0.003766941372305155, 0.0033012847416102886, 0.0030720895156264305], "prob_new_token": [2.26958636631025e-05, 0.011840293183922768, 0.04976755753159523, 0.3009224832057953, 0.7440536022186279, 0.8068472146987915, 0.9048178791999817, 0.9472760558128357, 0.9645595550537109, 0.9724434614181519, 0.977393388748169, 0.9819033741950989], "prob_old_token": [0.8750066161155701, 0.020419368520379066, 0.006020120345056057, 0.001854665344581008, 4.558587897918187e-05, 1.5253434867190663e-05, 6.368894446495688e-06, 3.293031340945163e-06, 1.911455683512031e-06, 1.1565823569981148e-06, 6.971868629079836e-07, 4.0253993915939645e-07], "l1-model.layers.0.mlp.down_proj.weight": [80005.03125], "l2-model.layers.0.mlp.down_proj.weight": [14.052990913391113], "linf-model.layers.0.mlp.down_proj.weight": [0.005363175645470619], "request": {"prompt": "{} is named in honor of", "subject": "St. Louis",
"target_new": {"str": "Pope Sixtus IV"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [9.03, 3.54, 2.509, 1.917, 0.936, 0.449, 0.14, 0.046, 0.022, 0.013, 0.008], "prob_new": [0.2290521115064621, 0.26214149594306946, 0.3032030761241913, 0.45028379559516907, 0.5884402990341187, 0.6999219655990601, 0.8798303604125977, 0.9568015336990356, 0.9786369800567627, 0.9875573515892029, 0.9919248819351196], "prob_old": [0.9271687269210815, 0.006817624904215336, 0.0017298298189416528, 0.002995881251990795, 0.00570461293682456, 0.0012915378902107477, 0.00057985883904621, 0.00039189474773593247, 0.0003099900495726615, 0.00026163746952079237, 0.00025725027080625296], "prob_new_token": [1.5089844964677468e-06, 0.0009809883777052164, 0.004564334638416767, 0.004816272761672735, 0.0628771260380745, 0.34415459632873535, 0.6720272302627563, 0.854816198348999, 0.9265071749687195, 0.9594084620475769, 0.9758056998252869], "prob_old_token": [0.8750066161155701, 0.007676436100155115, 0.003051287727430463, 0.0032354453578591347, 0.0031273076310753822, 0.001099584624171257, 0.0001265510218217969, 1.1294029718555976e-05, 2.9136799639672972e-06, 1.161059344667592e-06, 4.719751700577035e-07], "l1-model.layers.0.mlp.down_proj.weight": [79000.546875], "l2-model.layers.0.mlp.down_proj.weight": [13.7799711227417], "linf-model.layers.0.mlp.down_proj.weight": [0.004872629418969154], "request": {"prompt": "{} is named in honor of", "subject": "St. Louis", "target_new": {"str": "Sir George Everest"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [6.576, 4.6, 3.775, 3.561, 2.831, 3.241, 1.874, 0.869, 0.243, 0.14, 0.129, 0.099, 0.065, 0.043, 0.03, 0.023, 0.018, 0.014, 0.012, 0.01], "prob_new": [0.0038403940852731466, 0.01917954534292221, 0.032953232526779175, 0.21870768070220947, 0.2914756238460541, 0.2592405080795288, 0.37309715151786804, 0.6332908272743225, 0.8123881220817566, 0.8784617185592651, 0.8866085410118103, 0.9103264808654785, 0.9390825033187866, 0.9588591456413269, 0.9706704020500183, 0.9777805209159851, 0.982496440410614, 0.9859539866447449, 0.9885598421096802, 0.9905534982681274], "prob_old": [0.8951084017753601, 0.5839270353317261, 0.5786844491958618, 0.6242226958274841, 0.6160304546356201, 0.5743051767349243, 0.6299011707305908, 0.6361640095710754, 0.6310984492301941, 0.6178968548774719, 0.6019468307495117, 0.5925709009170532, 0.5917381048202515, 0.5953782796859741, 0.6001595258712769, 0.604867696762085, 0.6093904972076416, 0.6138235330581665, 0.6181704998016357, 0.6223688721656799], "prob_new_token": [8.593811799073592e-05, 0.007210691459476948, 0.010061211884021759, 0.004736656788736582, 0.012225964106619358, 0.006935562007129192, 0.02653418853878975, 0.09053859859704971, 0.5305291414260864, 0.7109456062316895, 0.7337248921394348, 0.7881620526313782, 0.8531583547592163, 0.8991854190826416, 0.9283702969551086, 0.9466875791549683, 0.9583392143249512, 0.9661262035369873, 0.9717766642570496, 0.9762577414512634], "prob_old_token": [0.7112005949020386, 0.0050265248864889145, 0.0014423680258914828, 0.0007956845802254975, 0.003825707361102104, 0.000925251457374543, 0.011874228715896606, 0.0012147033121436834, 0.0005726538947783411, 0.001919780857861042, 0.002928646979853511, 0.002206427277997136, 0.0010806556092575192, 0.00043741101399064064, 0.00017549892072565854, 7.916649337857962e-05, 4.2893334466498345e-05, 2.7813917768071406e-05, 2.067004061245825e-05, 1.6803647667984478e-05], "l1-model.layers.0.mlp.down_proj.weight": [95736.140625],
"l2-model.layers.0.mlp.down_proj.weight": [17.35196876525879], "linf-model.layers.0.mlp.down_proj.weight": [0.008541189134120941], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "Mentha"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}}, {"loss_per_step": [5.228, 3.224, 2.046, 0.943, 0.441, 0.206, 0.077, 0.119, 0.026, 0.016, 0.012, 0.008], "prob_new": [0.22864583134651184, 0.3748798668384552, 0.45054227113723755, 0.4659360945224762, 0.6639978289604187, 0.8209471106529236, 0.9273207783699036, 0.8896752595901489, 0.9744362831115723, 0.9836957454681396, 0.9883464574813843, 0.9917458295822144], "prob_old": [0.8951084017753601, 0.5784649848937988, 0.506456732749939, 0.5134631991386414, 0.5882797837257385, 0.6113895177841187, 0.6473924517631531, 0.6268917322158813, 0.6542186141014099, 0.6510307788848877, 0.6498085260391235, 0.6502568125724792], "prob_new_token": [0.10144669562578201, 0.3383113443851471, 0.38113993406295776, 0.4411790072917938, 0.5633505582809448, 0.7264618277549744, 0.9087640047073364, 0.8443710803985596, 0.9582644104957581, 0.9731307029724121, 0.9819567799568176, 0.9888787269592285], "prob_old_token": [0.7112005949020386, 0.0027088422793895006, 0.0018304899567738175, 9.093041444430128e-05, 8.446440006082412e-06, 2.3884063011792023e-06, 5.599036398962198e-07, 1.979444732569391e-06, 1.0514068549127842e-07, 6.231400107026275e-08, 3.7054569190786424e-08, 2.0551446766603476e-08], "l1-model.layers.0.mlp.down_proj.weight": [84293.1015625], "l2-model.layers.0.mlp.down_proj.weight": [14.694069862365723], "linf-model.layers.0.mlp.down_proj.weight": [0.005337218753993511], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "the Kazakh people"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}}, {"loss_per_step": [7.351, 4.794, 3.077, 2.257, 1.729, 1.782, 0.915, 0.332, 0.287, 0.143, 0.116, 0.047, 0.02, 0.01, 0.006], "prob_new": [0.3051206171512604, 0.3330916464328766, 0.44517451524734497, 0.5407429933547974, 0.6598061919212341, 0.663437008857727, 0.6821941137313843, 0.7869256734848022, 0.8042427897453308, 0.8834093809127808, 0.9008853435516357, 0.955593466758728, 0.9803407192230225, 0.9900948405265808, 0.9939899444580078], "prob_old": [0.8951084017753601, 0.5938538312911987, 0.6801033020019531, 0.5234655141830444, 0.6152215003967285, 0.6133014559745789, 0.6456488370895386, 0.5454229116439819, 0.4265395402908325, 0.3359358310699463, 0.3344162702560425, 0.33310914039611816, 0.33237022161483765, 0.3315400779247284, 0.3304291069507599], "prob_new_token": [1.5171211771303206e-06, 9.628146653994918e-05, 0.00028141980874352157, 0.001839512842707336, 0.005733173340559006, 0.004843509290367365, 0.06542469561100006, 0.3740471303462982, 0.43041980266571045, 0.653962254524231, 0.712425708770752, 0.8865652680397034, 0.9472565650939941, 0.9735874533653259, 0.9848670363426208], "prob_old_token": [0.7112005949020386, 0.014000377617776394, 0.13743248581886292, 0.004253695718944073, 0.029835427179932594, 0.007888099178671837, 0.024404233321547508, 0.002199106151238084, 0.000688344647642225, 0.00031585348187945783, 0.0001943826355272904, 5.138783308211714e-05, 1.9604558474384248e-05, 9.655368558014743e-06, 5.829369001730811e-06], "l1-model.layers.0.mlp.down_proj.weight": [85307.09375], "l2-model.layers.0.mlp.down_proj.weight": [15.286639213562012], "linf-model.layers.0.mlp.down_proj.weight": [0.006745830178260803], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", 
"target_new": {"str": "Friedrich Mohs"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}}, {"loss_per_step": [5.278, 2.968, 0.715, 0.193, 0.052, 0.02, 0.01, 0.006], "prob_new": [0.4562312364578247, 0.2190401554107666, 0.5987331867218018, 0.8345355987548828, 0.9501540660858154, 0.9804691672325134, 0.9897695779800415, 0.9937436580657959], "prob_old": [0.9135269522666931, 0.8086633086204529, 0.7817387580871582, 0.7418390512466431, 0.7454598546028137, 0.729362428188324, 0.7128251194953918, 0.7017777562141418], "prob_new_token": [2.8525771995191462e-05, 0.006115124560892582, 0.2536730170249939, 0.7070325016975403, 0.9173905253410339, 0.9698651432991028, 0.9846250414848328, 0.9906105399131775], "prob_old_token": [0.6618219614028931, 0.19320976734161377, 0.02751179039478302, 2.1214529624558054e-05, 4.651426934287883e-06, 8.757576779316878e-07, 3.0611067813879345e-07, 1.5378273587884905e-07], "l1-model.layers.0.mlp.down_proj.weight": [69121.5], "l2-model.layers.0.mlp.down_proj.weight": [11.715272903442383], "linf-model.layers.0.mlp.down_proj.weight": [0.0034419139847159386], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Christianity"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [2.407, 1.184, 0.743, 0.761, 0.313, 0.125, 0.059, 0.031, 0.019, 0.014, 0.011, 0.009], "prob_new": [0.5771450400352478, 0.6730127334594727, 0.7641854286193848, 0.7046153545379639, 0.8154016733169556, 0.9003790020942688, 0.9469490051269531, 0.9709762930870056, 0.9812305569648743, 0.9864902496337891, 0.9894950985908508, 0.9913389086723328], "prob_old": [0.9135269522666931, 0.8037919402122498, 0.7411166429519653, 0.6604619026184082, 0.6595776677131653, 0.6904293894767761, 0.7200021743774414, 0.7465744614601135, 0.7576057314872742, 0.7629103064537048, 0.7658750414848328, 0.7675706148147583], "prob_new_token": [0.0009396239765919745, 0.006422986276447773, 0.030286474153399467, 0.04227519780397415, 0.24978427588939667, 0.5756446123123169, 0.7826941013336182, 0.8834339380264282, 0.9277730584144592, 0.9503428339958191, 0.9628937840461731, 0.97038334608078], "prob_old_token": [0.6618219614028931, 0.15543778240680695, 0.002598949009552598, 0.00968941580504179, 0.016539307311177254, 0.002471520798280835, 0.00028766330797225237, 7.998442015377805e-05, 3.5343509807717055e-05, 1.9453646018519066e-05, 1.2275042536202818e-05, 8.589398021285888e-06], "l1-model.layers.0.mlp.down_proj.weight": [76456.9609375], "l2-model.layers.0.mlp.down_proj.weight": [13.639383316040039], "linf-model.layers.0.mlp.down_proj.weight": [0.005359765142202377], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Zoroastrianism"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [9.177, 3.676, 0.479, 0.078, 0.042, 0.016, 0.007], "prob_new": [0.00010335681145079434, 0.025335509330034256, 0.6192375421524048, 0.9251723289489746, 0.9585557579994202, 0.9839215874671936, 0.9927446246147156], "prob_old": [0.9135269522666931, 0.8128270506858826, 0.7264863848686218, 0.74112468957901, 0.7210239768028259, 0.7204946875572205, 0.7259353399276733], "prob_new_token": [0.00010335681145079434, 0.025335509330034256, 0.6192375421524048, 0.9251723289489746, 0.9585557579994202, 0.9839215874671936, 0.9927446246147156], "prob_old_token": [0.6618219614028931, 0.2040141224861145, 0.00027305708499625325, 1.9981532659585355e-06, 3.4908762813756766e-07, 1.0066330702329651e-07, 
3.438235296471248e-08], "l1-model.layers.0.mlp.down_proj.weight": [62820.984375], "l2-model.layers.0.mlp.down_proj.weight": [10.662841796875], "linf-model.layers.0.mlp.down_proj.weight": [0.002999603748321533], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Islam"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [10.037, 4.07, 0.379, 0.033, 0.017, 0.013, 0.012, 0.01], "prob_new": [4.3743333662860096e-05, 0.017084594815969467, 0.6843071579933167, 0.9677696228027344, 0.9833263754844666, 0.9867805242538452, 0.9885595440864563, 0.9901217818260193], "prob_old": [0.8717825412750244, 0.6325428485870361, 0.5754790306091309, 0.5723080635070801, 0.5891115665435791, 0.6011472344398499, 0.6089389324188232, 0.6141153573989868], "prob_new_token": [4.3743333662860096e-05, 0.017084594815969467, 0.6843071579933167, 0.9677696228027344, 0.9833263754844666, 0.9867805242538452, 0.9885595440864563, 0.9901217818260193], "prob_old_token": [0.6194280385971069, 0.036463428288698196, 0.03130010887980461, 0.00025098095647990704, 0.00011227688810322434, 8.612452074885368e-05, 6.853858212707564e-05, 5.2935392886865884e-05], "l1-model.layers.0.mlp.down_proj.weight": [67093.34375], "l2-model.layers.0.mlp.down_proj.weight": [11.405901908874512], "linf-model.layers.0.mlp.down_proj.weight": [0.0035122553817927837], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Islam"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [2.441, 1.119, 0.094, 0.007], "prob_new": [0.5318131446838379, 0.613414466381073, 0.9167826175689697, 0.9930864572525024], "prob_old": [0.8717825412750244, 0.35358211398124695, 0.5824272036552429, 0.6569347381591797], "prob_new_token": [0.6194280385971069, 0.043677400797605515, 0.766136884689331, 0.983450710773468], "prob_old_token": [0.6194280385971069, 0.043677400797605515, 0.766136884689331, 0.983450710773468], "l1-model.layers.0.mlp.down_proj.weight": [40527.109375], "l2-model.layers.0.mlp.down_proj.weight": [6.751236915588379], "linf-model.layers.0.mlp.down_proj.weight": [0.0015024486929178238], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Jainism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [2.763, 1.2, 0.179, 0.056, 0.03, 0.007], "prob_new": [0.4531806409358978, 0.5708074569702148, 0.8602074980735779, 0.9480791091918945, 0.970906674861908, 0.9934055805206299], "prob_old": [0.8717825412750244, 0.5628405809402466, 0.34549421072006226, 0.3395579755306244, 0.3411535620689392, 0.3405921459197998], "prob_new_token": [0.0006639091880060732, 0.04049006476998329, 0.5885314345359802, 0.8453922867774963, 0.9139634966850281, 0.9804653525352478], "prob_old_token": [0.6194280385971069, 0.04847880080342293, 0.025892216712236404, 0.006505727302283049, 0.002533425809815526, 0.0007867768872529268], "l1-model.layers.0.mlp.down_proj.weight": [53630.71875], "l2-model.layers.0.mlp.down_proj.weight": [9.26280689239502], "linf-model.layers.0.mlp.down_proj.weight": [0.0025097765028476715], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Hinduism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [16.792, 11.842, 8.555, 6.604, 4.88, 0.613, 0.22, 0.077, 0.034, 0.021, 0.015, 0.011, 0.009], "prob_new": [5.095086308415375e-08, 
7.192621069407323e-06, 0.00019254098879173398, 0.0013553197495639324, 0.007600737735629082, 0.541530966758728, 0.8022103905677795, 0.9258334040641785, 0.9669907093048096, 0.9793238639831543, 0.9855590462684631, 0.9890104532241821, 0.9908730387687683], "prob_old": [0.9610093832015991, 0.6825894117355347, 0.517342209815979, 0.41051849722862244, 0.4438212811946869, 0.4763064980506897, 0.43682441115379333, 0.4456714689731598, 0.4424358010292053, 0.3919922113418579, 0.3120049834251404, 0.25150156021118164, 0.21901047229766846], "prob_new_token": [5.095086308415375e-08, 7.192621069407323e-06, 0.00019254098879173398, 0.0013553197495639324, 0.007600737735629082, 0.541530966758728, 0.8022103905677795, 0.9258334040641785, 0.9669907093048096, 0.9793238639831543, 0.9855590462684631, 0.9890104532241821, 0.9908730387687683], "prob_old_token": [0.9285872578620911, 0.3691259026527405, 0.055754341185092926, 0.00747124757617712, 0.021698253229260445, 0.008115692064166069, 0.0001200884798890911, 9.930664418789092e-06, 1.7414229205314768e-06, 5.637065783048456e-07, 3.013206253399403e-07, 2.3602414955803397e-07, 2.2859823900489573e-07], "l1-model.layers.0.mlp.down_proj.weight": [85431.265625], "l2-model.layers.0.mlp.down_proj.weight": [15.196822166442871], "linf-model.layers.0.mlp.down_proj.weight": [0.005625566467642784], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Jerusalem"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [7.336, 4.411, 2.253, 1.499, 0.814, 0.368, 0.131, 0.051, 0.021, 0.009], "prob_new": [0.1783924549818039, 0.49330395460128784, 0.5049393773078918, 0.524847149848938, 0.598211944103241, 0.7396104335784912, 0.8846967816352844, 0.9514573216438293, 0.9798712730407715, 0.9913606643676758], "prob_old": [0.9610093832015991, 0.6356186866760254, 0.41986849904060364, 0.3885619640350342, 0.27146512269973755, 0.22104519605636597, 0.12182284891605377, 0.06799910217523575, 0.04039594531059265, 0.024518311023712158], "prob_new_token": [1.1907964108104352e-06, 0.0001495245232945308, 0.011060305871069431, 0.049891017377376556, 0.19651024043560028, 0.47933146357536316, 0.7694739103317261, 0.902985692024231, 0.9598110914230347, 0.9827924370765686], "prob_old_token": [0.9285872578620911, 0.27633100748062134, 0.0010147691937163472, 0.0012289427686482668, 0.0004507836129050702, 0.00013817446597386152, 3.3443229767726734e-05, 1.0001967893913388e-05, 3.598103148760856e-06, 1.3867201005268726e-06], "l1-model.layers.0.mlp.down_proj.weight": [71539.984375], "l2-model.layers.0.mlp.down_proj.weight": [12.76611042022705], "linf-model.layers.0.mlp.down_proj.weight": [0.0044647324830293655], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Crewe"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [16.699, 10.105, 7.543, 4.94, 3.555, 1.882, 0.346, 0.064, 0.029, 0.017, 0.008], "prob_new": [5.595259722213086e-08, 4.088498099008575e-05, 0.0005300275515764952, 0.007152765989303589, 0.028577640652656555, 0.15222463011741638, 0.7077439427375793, 0.9376075267791748, 0.971393346786499, 0.9831346273422241, 0.9915902614593506], "prob_old": [0.9610093832015991, 0.6745260953903198, 0.5228629112243652, 0.2867826819419861, 0.2059747874736786, 0.07683905959129333, 0.044177114963531494, 0.05311108008027077, 0.07681072503328323, 0.11074153333902359, 0.12938734889030457], "prob_new_token": [5.595259722213086e-08, 4.088498099008575e-05, 0.0005300275515764952, 0.007152765989303589, 
0.028577640652656555, 0.15222463011741638, 0.7077439427375793, 0.9376075267791748, 0.971393346786499, 0.9831346273422241, 0.9915902614593506], "prob_old_token": [0.9285872578620911, 0.35208839178085327, 0.07822901010513306, 0.0018774092895910144, 0.0007789407391101122, 8.60382424434647e-05, 8.50846481625922e-06, 1.090977434614615e-06, 4.5517327862398815e-07, 2.1899623448007333e-07, 4.352932592155412e-08], "l1-model.layers.0.mlp.down_proj.weight": [80733.609375], "l2-model.layers.0.mlp.down_proj.weight": [14.231047630310059], "linf-model.layers.0.mlp.down_proj.weight": [0.0048766471445560455], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Edinburgh"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [10.796, 7.001, 5.131, 3.982, 2.707, 1.634, 1.16, 0.288, 0.049, 0.021, 0.012, 0.009], "prob_new": [2.048414717137348e-05, 0.000910871138330549, 0.00591322872787714, 0.018646199256181717, 0.06671372801065445, 0.19524390995502472, 0.3134228587150574, 0.7497430443763733, 0.9520990252494812, 0.9789394736289978, 0.9878044724464417, 0.9907086491584778], "prob_old": [0.8966929316520691, 0.44980669021606445, 0.2152240127325058, 0.0664796307682991, 0.221929669380188, 0.298083633184433, 0.2992958128452301, 0.3229359984397888, 0.315350204706192, 0.2846565246582031, 0.2541772425174713, 0.20191511511802673], "prob_new_token": [2.048414717137348e-05, 0.000910871138330549, 0.00591322872787714, 0.018646199256181717, 0.06671372801065445, 0.19524390995502472, 0.3134228587150574, 0.7497430443763733, 0.9520990252494812, 0.9789394736289978, 0.9878044724464417, 0.9907086491584778], "prob_old_token": [0.7980557680130005, 0.005721241235733032, 0.008514939807355404, 0.007365560159087181, 0.0012691682204604149, 0.004494040738791227, 0.002248415956273675, 0.0004043330845888704, 4.446822276804596e-05, 1.2622925169125665e-05, 6.760374162695371e-06, 5.713970040233107e-06], "l1-model.layers.0.mlp.down_proj.weight": [77805.65625], "l2-model.layers.0.mlp.down_proj.weight": [13.844996452331543], "linf-model.layers.0.mlp.down_proj.weight": [0.005287818610668182], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Stockholm"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.999, 2.155, 0.546, 0.104, 0.016, 0.006], "prob_new": [0.35732850432395935, 0.4223313331604004, 0.6266452670097351, 0.9021655917167664, 0.9846394062042236, 0.9938253164291382], "prob_old": [0.8966929316520691, 0.26416218280792236, 0.2188892811536789, 0.3250278830528259, 0.3472200036048889, 0.37694990634918213], "prob_new_token": [2.1942649254924618e-05, 0.00397435761988163, 0.3192873001098633, 0.865360677242279, 0.9705483317375183, 0.9865150451660156], "prob_old_token": [0.7980557680130005, 0.014816070906817913, 0.005477173253893852, 0.005614889319986105, 0.0020059042144566774, 0.0009879014687612653], "l1-model.layers.0.mlp.down_proj.weight": [49825.1875], "l2-model.layers.0.mlp.down_proj.weight": [8.839394569396973], "linf-model.layers.0.mlp.down_proj.weight": [0.0024864673614501953], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Philadelphia, Pennsylvania"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.743, 0.497, 0.362, 0.148, 0.043, 0.019, 0.013, 0.01, 0.009], "prob_new": [0.2795184254646301, 0.6113113164901733, 0.7511341571807861, 0.8650436401367188, 0.9587244987487793, 0.9816228747367859, 0.9871417880058289, 0.9900504946708679, 
0.9910506010055542], "prob_old": [0.8966929316520691, 0.40794694423675537, 0.4674663841724396, 0.46095651388168335, 0.45668065547943115, 0.45851510763168335, 0.463539183139801, 0.46528568863868713, 0.46367111802101135], "prob_new_token": [8.744558726903051e-05, 0.5725699067115784, 0.9046490788459778, 0.7696183323860168, 0.9437255263328552, 0.9600063562393188, 0.968381404876709, 0.975708544254303, 0.9806134104728699], "prob_old_token": [0.7980557680130005, 0.002010909840464592, 0.0018540596356615424, 0.006549499928951263, 0.001087872195057571, 0.000567008217331022, 0.00044087955029681325, 0.0003218712517991662, 0.00023429296561516821], "l1-model.layers.0.mlp.down_proj.weight": [69668.15625], "l2-model.layers.0.mlp.down_proj.weight": [11.852254867553711], "linf-model.layers.0.mlp.down_proj.weight": [0.003908926621079445], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Amsterdam, Netherlands"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.524, 2.108, 1.267, 0.877, 0.685, 0.051, 0.029, 0.022, 0.019, 0.015, 0.011, 0.009], "prob_new": [0.47731471061706543, 0.59796541929245, 0.6868171095848083, 0.7838747501373291, 0.7493590712547302, 0.9506548643112183, 0.9719468355178833, 0.9779027104377747, 0.9810049533843994, 0.9849098324775696, 0.988619863986969, 0.9909472465515137], "prob_old": [0.7825582027435303, 0.3938165307044983, 0.40515968203544617, 0.4014023542404175, 0.4007239043712616, 0.37557312846183777, 0.39835235476493835, 0.3984422981739044, 0.3806168735027313, 0.36529600620269775, 0.34949201345443726, 0.3337095379829407], "prob_new_token": [6.658617479615714e-08, 2.3284321287064813e-05, 0.0011570177739486098, 0.0037554772570729256, 0.02020055428147316, 0.9330626130104065, 0.9644662141799927, 0.9630884528160095, 0.9666788578033447, 0.9779744744300842, 0.9897832870483398, 0.9955164790153503], "prob_old_token": [0.7788311839103699, 6.792586646042764e-05, 9.463529568165541e-05, 1.7474078049417585e-05, 2.9915914637967944e-05, 5.824220465910912e-07, 6.888725465614698e-07, 1.12946702301997e-06, 1.7068583701984608e-06, 1.6488766050315462e-06, 9.453940492676338e-07, 4.6683427967764146e-07], "l1-model.layers.0.mlp.down_proj.weight": [78353.7265625], "l2-model.layers.0.mlp.down_proj.weight": [13.861536979675293], "linf-model.layers.0.mlp.down_proj.weight": [0.005421037785708904], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Bourg-la-Reine"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [2.998, 1.885, 2.686, 1.075, 0.531, 0.423, 0.394, 0.154, 0.146, 0.131, 0.106, 0.083, 0.068, 0.059, 0.053, 0.047, 0.041, 0.035, 0.03, 0.026], "prob_new": [0.6149026155471802, 0.5665631890296936, 0.45833247900009155, 0.634746253490448, 0.6807541847229004, 0.7103961706161499, 0.7206201553344727, 0.8594369292259216, 0.8665909767150879, 0.8797492980957031, 0.9012651443481445, 0.9209694862365723, 0.9349053502082825, 0.9433541297912598, 0.9491047859191895, 0.9544973373413086, 0.9602078199386597, 0.9656825661659241, 0.9703518748283386, 0.9741289019584656], "prob_old": [0.7825582027435303, 0.3917931020259857, 0.31241804361343384, 0.5017568469047546, 0.5345779061317444, 0.46307671070098877, 0.45893487334251404, 0.5261111259460449, 0.5272291898727417, 0.5247536301612854, 0.5219756364822388, 0.5102279186248779, 0.4937916100025177, 0.48014014959335327, 0.47263872623443604, 0.4706100821495056, 0.4725630581378937, 0.477152019739151, 0.4831949770450592, 
0.48979124426841736], "prob_new_token": [1.1470999197626952e-05, 0.0012925536138936877, 0.00012208597036078572, 0.02343760058283806, 0.2061757594347, 0.3124296963214874, 0.3483997881412506, 0.8311202526092529, 0.8037944436073303, 0.8056895136833191, 0.8471177816390991, 0.8921910524368286, 0.9233767986297607, 0.9384638071060181, 0.9463164210319519, 0.9547378420829773, 0.9655224084854126, 0.9760717749595642, 0.9840515851974487, 0.9891891479492188], "prob_old_token": [0.7788311839103699, 4.902853834209964e-05, 9.395567758474499e-06, 2.095306081173476e-05, 2.8163931347080506e-05, 4.2496787500567734e-05, 2.1867361283511855e-05, 2.7824848075397313e-05, 5.3806958021596074e-05, 8.2774757174775e-05, 7.125097181415185e-05, 4.075068500242196e-05, 2.118327574862633e-05, 1.1788322808570229e-05, 7.251730039570248e-06, 4.772966804011958e-06, 3.2170091799343936e-06, 2.195653678427334e-06, 1.5477653505513445e-06, 1.1493463034639717e-06], "l1-model.layers.0.mlp.down_proj.weight": [90857.359375], "l2-model.layers.0.mlp.down_proj.weight": [15.976784706115723], "linf-model.layers.0.mlp.down_proj.weight": [0.009382679127156734], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Queens, New York"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [3.428, 1.432, 0.594, 2.282, 1.272, 0.166, 0.329, 0.053, 0.14, 0.024, 0.028, 0.026, 0.021, 0.017, 0.017, 0.017, 0.015, 0.012, 0.011, 0.01], "prob_new": [0.4513727128505707, 0.6461132168769836, 0.7608645558357239, 0.4817626476287842, 0.5970205068588257, 0.8535711169242859, 0.7891876101493835, 0.9491268396377563, 0.8825187087059021, 0.9768109321594238, 0.9730663299560547, 0.9746227264404297, 0.9791855812072754, 0.9828502535820007, 0.982835590839386, 0.9829942584037781, 0.985474705696106, 0.9878613352775574, 0.9891778230667114, 0.9898177981376648], "prob_old": [0.7825582027435303, 0.40111902356147766, 0.443490207195282, 0.18285273015499115, 0.32596611976623535, 0.4560072720050812, 0.569798469543457, 0.4794939160346985, 0.45079362392425537, 0.5396325588226318, 0.5768850445747375, 0.5715081691741943, 0.5481548309326172, 0.5076410174369812, 0.4844002425670624, 0.47720134258270264, 0.47857025265693665, 0.48560693860054016, 0.4982426166534424, 0.5126546621322632], "prob_new_token": [3.683622708194889e-06, 0.0021059387363493443, 0.06764732301235199, 0.00011558007099665701, 0.006285445298999548, 0.9119880199432373, 0.8609598278999329, 0.9508869647979736, 0.6101630926132202, 0.9845340251922607, 0.9776800274848938, 0.9783992767333984, 0.9838386178016663, 0.9885843992233276, 0.9887380599975586, 0.9886354207992554, 0.9916329383850098, 0.9941510558128357, 0.995363712310791, 0.9958922266960144], "prob_old_token": [0.7788311839103699, 4.799425005330704e-05, 8.106652239803225e-05, 1.6321303064614767e-06, 6.427606422221288e-06, 4.843201395487995e-07, 1.9160277133778436e-06, 9.204192679135303e-07, 1.1900988283741754e-05, 1.3765795756626176e-06, 1.672303483246651e-06, 1.0606323712636367e-06, 5.065568871032156e-07, 2.283794628965552e-07, 1.7900460136388574e-07, 1.5500378935939807e-07, 9.67735473977882e-08, 5.71847245112167e-08, 4.137395848147207e-08, 3.5370938178402866e-08], "l1-model.layers.0.mlp.down_proj.weight": [96909.7890625], "l2-model.layers.0.mlp.down_proj.weight": [17.44310760498047], "linf-model.layers.0.mlp.down_proj.weight": [0.008487967774271965], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Grand Rapids, 
Minnesota"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [4.425, 1.773, 1.081, 0.241, 0.193, 0.143, 0.106, 0.083, 0.069, 0.058, 0.048, 0.039, 0.03, 0.022, 0.016, 0.012, 0.009], "prob_new": [0.3284355401992798, 0.4809999167919159, 0.5391355752944946, 0.7905700206756592, 0.8264179229736328, 0.8683406710624695, 0.9005273580551147, 0.9205300211906433, 0.9338390827178955, 0.9443356394767761, 0.9535651206970215, 0.962293803691864, 0.9706966876983643, 0.9782022833824158, 0.9840744733810425, 0.9884077906608582, 0.991520881652832], "prob_old": [0.7979272603988647, 0.6738840937614441, 0.6628189086914062, 0.5309268236160278, 0.5024126172065735, 0.4960714876651764, 0.4938766062259674, 0.4902772605419159, 0.4872734546661377, 0.48661214113235474, 0.4882282614707947, 0.4913462698459625, 0.49488726258277893, 0.4981423020362854, 0.5012863874435425, 0.5038895606994629, 0.5055932402610779], "prob_new_token": [7.54646953282645e-06, 0.009694578126072884, 0.06549175828695297, 0.8026129603385925, 0.8186959624290466, 0.8580518364906311, 0.902326226234436, 0.9293813109397888, 0.9464352130889893, 0.9595829844474792, 0.9704450964927673, 0.979339063167572, 0.9862148761749268, 0.9912258982658386, 0.9944949746131897, 0.9964565634727478, 0.9976364970207214], "prob_old_token": [0.6284904479980469, 0.1292402744293213, 0.04362946376204491, 0.0016990816220641136, 0.0009504483896307647, 0.0005619662115350366, 0.0002772354637272656, 0.00014042806287761778, 7.925782847451046e-05, 4.739842552226037e-05, 2.8705586373689584e-05, 1.694843012955971e-05, 9.594337825546972e-06, 5.2448785936576314e-06, 2.9785430797346635e-06, 1.845140332079609e-06, 1.2305980590099352e-06], "l1-model.layers.0.mlp.down_proj.weight": [87129.109375], "l2-model.layers.0.mlp.down_proj.weight": [15.51914119720459], "linf-model.layers.0.mlp.down_proj.weight": [0.007937717251479626], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Florence, Italy"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [3.647, 2.121, 1.566, 1.642, 1.946, 1.495, 0.951, 0.577, 0.229, 0.119, 0.052, 0.022, 0.014, 0.01, 0.009], "prob_new": [0.2978762984275818, 0.4981231689453125, 0.6066378951072693, 0.5535773634910583, 0.5740432143211365, 0.6143755912780762, 0.6548753976821899, 0.6970927119255066, 0.8075096011161804, 0.8899282813072205, 0.9499191641807556, 0.978150486946106, 0.9862700700759888, 0.9900068640708923, 0.9911922812461853], "prob_old": [0.7979272603988647, 0.6377074122428894, 0.617750883102417, 0.616960346698761, 0.7149531245231628, 0.6805102229118347, 0.5919771790504456, 0.5157409310340881, 0.493007093667984, 0.492840051651001, 0.49536052346229553, 0.49749836325645447, 0.49945589900016785, 0.5020411014556885, 0.5053492784500122], "prob_new_token": [1.9384273400646634e-05, 0.00039798891521058977, 0.0015657383482903242, 0.004079945385456085, 0.00027114851400256157, 0.0018013839144259691, 0.02047732099890709, 0.11261458694934845, 0.5749571919441223, 0.8326300978660583, 0.9681068658828735, 0.9898160696029663, 0.9945566058158875, 0.9958048462867737, 0.9955744743347168], "prob_old_token": [0.6284904479980469, 0.02271980047225952, 0.15354228019714355, 0.023295775055885315, 0.08736558258533478, 0.10193152725696564, 0.05059691146016121, 0.008036524057388306, 0.003398960456252098, 0.002821139059960842, 0.0006510599050670862, 0.000193084153579548, 9.095512359635904e-05, 6.48253335384652e-05, 6.946729263290763e-05], 
"l1-model.layers.0.mlp.down_proj.weight": [84815.9140625], "l2-model.layers.0.mlp.down_proj.weight": [15.381538391113281], "linf-model.layers.0.mlp.down_proj.weight": [0.006786659825593233], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Aberdeen, Washington"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [7.303, 6.035, 4.315, 2.059, 0.652, 0.901, 0.439, 0.042, 0.037, 0.052, 0.046, 0.029, 0.02, 0.016, 0.013, 0.011, 0.009], "prob_new": [0.005293817725032568, 0.011832154355943203, 0.03411624953150749, 0.17806614935398102, 0.5495306253433228, 0.536396861076355, 0.6732627749443054, 0.9586763381958008, 0.9634534120559692, 0.9499603509902954, 0.9548506736755371, 0.9719514846801758, 0.9800522327423096, 0.9843090176582336, 0.9872024059295654, 0.9892739057540894, 0.9907916784286499], "prob_old": [0.7979272603988647, 0.6013525128364563, 0.695546567440033, 0.6603131890296936, 0.6493064165115356, 0.5513322949409485, 0.5570710301399231, 0.5585888624191284, 0.5799103379249573, 0.5975042581558228, 0.6004273891448975, 0.5968905687332153, 0.5908921360969543, 0.5874413847923279, 0.5851579904556274, 0.5831785202026367, 0.581113338470459], "prob_new_token": [4.2988340283045545e-05, 0.0002448398736305535, 0.0027256030589342117, 0.05381310358643532, 0.37430256605148315, 0.18609771132469177, 0.8682669997215271, 0.9373379349708557, 0.9444996118545532, 0.9244629740715027, 0.9338572025299072, 0.9607123732566833, 0.972551167011261, 0.9787704348564148, 0.9827547073364258, 0.9854512214660645, 0.9873522520065308], "prob_old_token": [0.6284904479980469, 0.0010538731003180146, 0.01770007610321045, 0.0021497441921383142, 0.0026726163923740387, 0.004140846431255341, 0.0009252911549992859, 0.0005846325075253844, 0.0006233338499441743, 0.0008926563314162195, 0.0008926614536903799, 0.0006312805926427245, 0.0006022434099577367, 0.0006146419327706099, 0.0006416753749363124, 0.0006560129695571959, 0.0006295415805652738], "l1-model.layers.0.mlp.down_proj.weight": [86162.5], "l2-model.layers.0.mlp.down_proj.weight": [15.609855651855469], "linf-model.layers.0.mlp.down_proj.weight": [0.007205449044704437], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Reus"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [11.799, 6.255, 2.456, 0.198, 0.022, 0.009], "prob_new": [7.5101984293723945e-06, 0.0019216007785871625, 0.08580908924341202, 0.8205785155296326, 0.9780117273330688, 0.9906631708145142], "prob_old": [0.8133355975151062, 0.44593533873558044, 0.4819847047328949, 0.4965067207813263, 0.49631839990615845, 0.49638983607292175], "prob_new_token": [7.5101984293723945e-06, 0.0019216007785871625, 0.08580908924341202, 0.8205785155296326, 0.9780117273330688, 0.9906631708145142], "prob_old_token": [0.7344122529029846, 0.004148172680288553, 0.001714280224405229, 0.0015163576463237405, 0.00018715858459472656, 0.00012098175648134202], "l1-model.layers.0.mlp.down_proj.weight": [54661.515625], "l2-model.layers.0.mlp.down_proj.weight": [9.261221885681152], "linf-model.layers.0.mlp.down_proj.weight": [0.002487014979124069], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Paris"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [4.471, 2.959, 2.103, 1.401, 1.852, 1.295, 0.826, 0.272, 0.792, 0.061, 0.277, 0.27, 0.093, 0.029, 0.013, 0.008], "prob_new": [0.2872834801673889, 
0.47684478759765625, 0.49535343050956726, 0.6132042407989502, 0.5971246957778931, 0.7344187498092651, 0.7346421480178833, 0.8250405788421631, 0.5864412784576416, 0.9444708228111267, 0.8294392824172974, 0.8316847681999207, 0.9215916991233826, 0.9721987247467041, 0.9876583814620972, 0.9924774169921875], "prob_old": [0.8133355975151062, 0.31979966163635254, 0.45730865001678467, 0.40158307552337646, 0.4857288897037506, 0.45983612537384033, 0.4432351589202881, 0.46550416946411133, 0.36825910210609436, 0.0866425409913063, 0.1394999474287033, 0.09672431647777557, 0.09136714041233063, 0.11345526576042175, 0.12599745392799377, 0.1544322669506073], "prob_new_token": [0.000622739375103265, 0.00997384637594223, 0.005377177614718676, 0.007920584641397, 0.0015114611014723778, 0.006034079007804394, 0.04086124524474144, 0.3578372001647949, 0.13243305683135986, 0.802393913269043, 0.33619216084480286, 0.3457857668399811, 0.7005587816238403, 0.8970078229904175, 0.9566614031791687, 0.9746512770652771], "prob_old_token": [0.7344122529029846, 0.0009008925408124924, 0.0020930932369083166, 0.005157801788300276, 0.00010755097900982946, 0.00023472370230592787, 0.00017809167911764234, 0.0001027842445182614, 8.745017112232745e-05, 1.4015344277140684e-05, 8.879966117092408e-06, 5.998786036798265e-06, 2.974583139803144e-06, 1.3854325970896753e-06, 4.142625584790949e-07, 1.9035779530440777e-07], "l1-model.layers.0.mlp.down_proj.weight": [85521.7265625], "l2-model.layers.0.mlp.down_proj.weight": [15.511151313781738], "linf-model.layers.0.mlp.down_proj.weight": [0.007202116772532463], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Ephesus"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [2.974, 1.527, 3.188, 2.394, 1.496, 0.31, 0.09, 0.038, 0.018, 0.011, 0.009], "prob_new": [0.6511784791946411, 0.579259991645813, 0.4231482148170471, 0.6424605846405029, 0.6538605093955994, 0.7925971746444702, 0.9170603156089783, 0.9632728099822998, 0.9820562601089478, 0.989292323589325, 0.9912867546081543], "prob_old": [0.8133355975151062, 0.4106042683124542, 0.46003493666648865, 0.4756734073162079, 0.4696403741836548, 0.4721112549304962, 0.42734041810035706, 0.3896535336971283, 0.38781654834747314, 0.38450029492378235, 0.3771000802516937], "prob_new_token": [0.00013980829680804163, 0.01409411896020174, 0.0002471999323461205, 0.000819987733848393, 0.011816119775176048, 0.40564393997192383, 0.816935658454895, 0.9654887318611145, 0.9861358404159546, 0.9890742897987366, 0.9887877106666565], "prob_old_token": [0.7344122529029846, 0.00027068809140473604, 0.00041406351374462247, 0.00028830565861426294, 0.0006622656364925206, 0.0005277945892885327, 3.377422763151117e-05, 1.3682383723789826e-06, 2.7647598699331866e-07, 1.7200854074417293e-07, 1.5524744867434492e-07], "l1-model.layers.0.mlp.down_proj.weight": [70047.734375], "l2-model.layers.0.mlp.down_proj.weight": [12.56310749053955], "linf-model.layers.0.mlp.down_proj.weight": [0.005040694959461689], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Montreux"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [3.754, 2.378, 1.957, 1.447, 1.063, 0.927, 0.829, 0.743, 0.584, 0.368, 0.223, 0.054, 0.029, 0.022, 0.018, 0.014, 0.011, 0.009], "prob_new": [0.46865397691726685, 0.4404492676258087, 0.5440269708633423, 0.5667917728424072, 0.6789388060569763, 0.7524018883705139, 0.7919400334358215, 0.8149899840354919, 0.8341391682624817, 
0.8516165614128113, 0.8754994869232178, 0.95234614610672, 0.972568690776825, 0.978559672832489, 0.9825299382209778, 0.9860637187957764, 0.9888573884963989, 0.9908044338226318], "prob_old": [0.6166081428527832, 0.3686041831970215, 0.3417280316352844, 0.3740471303462982, 0.3615880012512207, 0.3325696289539337, 0.3414636254310608, 0.37788379192352295, 0.3918449580669403, 0.41816458106040955, 0.4663538932800293, 0.5316709280014038, 0.5569802522659302, 0.5646532773971558, 0.5670175552368164, 0.5655233860015869, 0.5608891844749451, 0.5546215772628784], "prob_new_token": [3.655817636172287e-06, 0.0008180041913874447, 0.0011751563288271427, 0.002101818798109889, 0.003091259393841028, 0.0035769781097769737, 0.004980399738997221, 0.007568333297967911, 0.02018643729388714, 0.08674991875886917, 0.23474279046058655, 0.7381841540336609, 0.8717792630195618, 0.9095669984817505, 0.9326093196868896, 0.9507308006286621, 0.9635149836540222, 0.9717125296592712], "prob_old_token": [0.7293808460235596, 0.00047763684415258467, 0.0022486604284495115, 0.0036242196802049875, 0.00035993524943478405, 0.00035983845009468496, 0.00039975738036446273, 0.00038675765972584486, 0.00032003779779188335, 0.00016035934095270932, 3.442335219006054e-05, 2.8927526727784425e-05, 1.2927791431138758e-05, 6.042647328285966e-06, 3.449589939918951e-06, 2.2838960376247996e-06, 1.6552876331843436e-06, 1.2692377140410827e-06], "l1-model.layers.0.mlp.down_proj.weight": [96730.234375], "l2-model.layers.0.mlp.down_proj.weight": [17.19707489013672], "linf-model.layers.0.mlp.down_proj.weight": [0.008174903690814972], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Berkeley, Gloucestershire"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [3.176, 1.645, 1.045, 0.322, 0.198, 0.064, 0.028, 0.012, 0.009], "prob_new": [0.5475641489028931, 0.6394789814949036, 0.6688206791877747, 0.762211799621582, 0.8364953398704529, 0.9399288296699524, 0.9723869562149048, 0.9878633618354797, 0.9910309910774231], "prob_old": [0.6166081428527832, 0.386275053024292, 0.46199071407318115, 0.48006805777549744, 0.4609893262386322, 0.535113513469696, 0.5642615556716919, 0.4883774220943451, 0.4429892897605896], "prob_new_token": [8.43507734771265e-07, 0.0007909772102721035, 0.012690170668065548, 0.5702929496765137, 0.773540198802948, 0.9843698740005493, 0.980656087398529, 0.960291862487793, 0.9752159118652344], "prob_old_token": [0.7293808460235596, 0.002738693729043007, 0.00106810184661299, 0.00013218820095062256, 1.6614674677839503e-05, 5.181547635402239e-07, 3.54594106966033e-07, 1.3188099501348916e-07, 3.593822839320637e-08], "l1-model.layers.0.mlp.down_proj.weight": [64818.0625], "l2-model.layers.0.mlp.down_proj.weight": [11.503771781921387], "linf-model.layers.0.mlp.down_proj.weight": [0.003987553063780069], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Johannesburg, South Africa"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [7.018, 3.63, 1.956, 0.588, 0.227, 0.141, 0.093, 0.069, 0.054, 0.04, 0.028, 0.02, 0.015, 0.012, 0.01, 0.009], "prob_new": [0.48342373967170715, 0.4034323990345001, 0.489237904548645, 0.6512480974197388, 0.8145840167999268, 0.876001238822937, 0.9146223068237305, 0.9347951412200928, 0.9481462240219116, 0.9611569046974182, 0.9725446105003357, 0.9803661108016968, 0.9850521087646484, 0.9878703355789185, 0.98972487449646, 0.991077184677124], "prob_old": 
[0.6166081428527832, 0.3801600933074951, 0.34348270297050476, 0.370382159948349, 0.2888450622558594, 0.26543325185775757, 0.24519595503807068, 0.2086763083934784, 0.16566720604896545, 0.1331099569797516, 0.12274645268917084, 0.12352170795202255, 0.1239435076713562, 0.12013786286115646, 0.11369118094444275, 0.1067899763584137], "prob_new_token": [8.301199159177486e-07, 0.0008725819643586874, 0.020867176353931427, 0.3112817406654358, 0.6456026434898376, 0.7635968327522278, 0.8367162346839905, 0.8749740719795227, 0.9008054137229919, 0.9264530539512634, 0.9488862156867981, 0.9641051292419434, 0.9731022715568542, 0.9784435033798218, 0.9819085597991943, 0.9843900203704834], "prob_old_token": [0.7293808460235596, 0.002657773904502392, 0.00040036498103290796, 7.989370351424441e-05, 3.7375535612227395e-05, 2.3482502001570538e-05, 1.531208363303449e-05, 1.140186213888228e-05, 8.417876415478531e-06, 5.28064356331015e-06, 2.9438645015034126e-06, 1.7065621022993582e-06, 1.0648983561623027e-06, 7.113520155144215e-07, 5.002466991754773e-07, 3.6790814306186803e-07], "l1-model.layers.0.mlp.down_proj.weight": [86281.125], "l2-model.layers.0.mlp.down_proj.weight": [15.341268539428711], "linf-model.layers.0.mlp.down_proj.weight": [0.007098432630300522], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Munich"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [4.618, 2.26, 0.886, 0.361, 0.136, 0.082, 0.057, 0.041, 0.031, 0.023, 0.018, 0.014, 0.011, 0.009], "prob_new": [0.33125823736190796, 0.3506396412849426, 0.5721485018730164, 0.7541465759277344, 0.8823472857475281, 0.9243437051773071, 0.9463951587677002, 0.9606400728225708, 0.9702495336532593, 0.9770190119743347, 0.9820529222488403, 0.9858723878860474, 0.9886800050735474, 0.9906246662139893], "prob_old": [0.9821176528930664, 0.009654291905462742, 0.07554110139608383, 0.007173642516136169, 0.000878790975548327, 0.00027983146719634533, 0.0001339257141808048, 7.715971150901169e-05, 4.879795960732736e-05, 3.178091355948709e-05, 2.0449773728614673e-05, 1.287970007979311e-05, 8.083449756668415e-06, 5.2182267609168775e-06], "prob_new_token": [0.0008528511389158666, 0.03136255592107773, 0.11647697538137436, 0.3855726718902588, 0.7068188786506653, 0.8194279074668884, 0.875945508480072, 0.9107033014297485, 0.9335610270500183, 0.9497103691101074, 0.9619066119194031, 0.9712812304496765, 0.9782691597938538, 0.983221709728241], "prob_old_token": [0.9821176528930664, 0.009654291905462742, 0.07554110139608383, 0.007173642516136169, 0.000878790975548327, 0.00027983146719634533, 0.0001339257141808048, 7.715971150901169e-05, 4.879795960732736e-05, 3.178091355948709e-05, 2.0449773728614673e-05, 1.287970007979311e-05, 8.083449756668415e-06, 5.2182267609168775e-06], "l1-model.layers.0.mlp.down_proj.weight": [87021.359375], "l2-model.layers.0.mlp.down_proj.weight": [14.942736625671387], "linf-model.layers.0.mlp.down_proj.weight": [0.0061792186461389065], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "the Americas"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.494, 2.926, 0.737, 0.392, 0.046, 0.032, 0.019, 0.017, 0.026, 0.016, 0.009], "prob_new": [0.40577608346939087, 0.19543114304542542, 0.6134535074234009, 0.7256848812103271, 0.9563878178596497, 0.9690120220184326, 0.9813426733016968, 0.9830031991004944, 0.9742816686630249, 0.9843360185623169, 0.9915125370025635], "prob_old": [0.9821176528930664, 
0.00316936569288373, 0.11420311778783798, 0.06905581802129745, 0.01675676740705967, 0.010084982961416245, 0.005495624616742134, 0.006111906375735998, 0.011087289080023766, 0.005227030720561743, 0.0018369393656030297], "prob_new_token": [0.00015386084851343185, 0.007503215689212084, 0.2298327535390854, 0.46057838201522827, 0.9147104620933533, 0.9400731325149536, 0.9644709229469299, 0.9673628211021423, 0.9502680897712708, 0.9702950716018677, 0.984449028968811], "prob_old_token": [0.9821176528930664, 0.00316936569288373, 0.11420311778783798, 0.06905581802129745, 0.01675676740705967, 0.010084982961416245, 0.005495624616742134, 0.006111906375735998, 0.011087289080023766, 0.005227030720561743, 0.0018369393656030297], "l1-model.layers.0.mlp.down_proj.weight": [72493.96875], "l2-model.layers.0.mlp.down_proj.weight": [12.692512512207031], "linf-model.layers.0.mlp.down_proj.weight": [0.00490572489798069], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "North America"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.551, 2.696, 1.073, 0.028, 0.021, 0.019, 0.015, 0.012, 0.01, 0.009], "prob_new": [0.5188276767730713, 0.3471651077270508, 0.6595102548599243, 0.9724785685539246, 0.9792391061782837, 0.981676459312439, 0.9853955507278442, 0.9880191087722778, 0.989656925201416, 0.9911500811576843], "prob_old": [0.9821176528930664, 0.0004907978582195938, 0.028989369049668312, 0.001374193117953837, 0.0007311942172236741, 0.0006008236086927354, 0.0004646667803172022, 0.0003469641669653356, 0.0002577252162154764, 0.00018465663015376776], "prob_new_token": [2.102440930684679e-06, 0.0032509826123714447, 0.04276978224515915, 0.9332838654518127, 0.9524133205413818, 0.9597122073173523, 0.9707183241844177, 0.9781141877174377, 0.9820069670677185, 0.9851201772689819], "prob_old_token": [0.9821176528930664, 0.0004907978582195938, 0.028989369049668312, 0.001374193117953837, 0.0007311942172236741, 0.0006008236086927354, 0.0004646667803172022, 0.0003469641669653356, 0.0002577252162154764, 0.00018465663015376776], "l1-model.layers.0.mlp.down_proj.weight": [72955.34375], "l2-model.layers.0.mlp.down_proj.weight": [12.589659690856934], "linf-model.layers.0.mlp.down_proj.weight": [0.004510141909122467], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "Antarctica"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [5.435, 2.296, 1.842, 1.293, 0.913, 0.349, 0.126, 0.06, 0.041, 0.033, 0.028, 0.024, 0.021, 0.018, 0.014, 0.011, 0.009], "prob_new": [0.332294762134552, 0.4829549193382263, 0.5760126113891602, 0.6440246105194092, 0.6791847944259644, 0.780469536781311, 0.8944404125213623, 0.9449204206466675, 0.9609664678573608, 0.9686322212219238, 0.9732586741447449, 0.9766561388969421, 0.979699969291687, 0.98273104429245, 0.9857818484306335, 0.9887350797653198, 0.9914172887802124], "prob_old": [0.9558717608451843, 0.7130565643310547, 0.38365304470062256, 0.3525826632976532, 0.43891581892967224, 0.3566291034221649, 0.3372114300727844, 0.330208420753479, 0.32525360584259033, 0.31879889965057373, 0.3110537528991699, 0.3036142587661743, 0.2980543375015259, 0.2951413691043854, 0.29475483298301697, 0.2962989807128906, 0.2991054356098175], "prob_new_token": [1.6631542166578583e-05, 0.002279693027958274, 0.005505730863660574, 0.022707588970661163, 0.0664619579911232, 0.3561558723449707, 0.6921300888061523, 0.8417341113090515, 0.8891746997833252, 0.9114800095558167, 0.9246203303337097, 
0.9341479539871216, 0.9427246451377869, 0.9513518214225769, 0.960095226764679, 0.9685829877853394, 0.9762840867042542], "prob_old_token": [0.8699713349342346, 0.1447894126176834, 0.0018071378581225872, 0.0015510392840951681, 0.003139110282063484, 0.0014911405742168427, 0.00048525549937039614, 0.00014996265235822648, 6.554614083142951e-05, 3.72645263269078e-05, 2.5009081582538784e-05, 1.8660546629689634e-05, 1.4846259546175133e-05, 1.2180876183265354e-05, 1.0047954674519133e-05, 8.186187187675387e-06, 6.521731847897172e-06], "l1-model.layers.0.mlp.down_proj.weight": [89803.765625], "l2-model.layers.0.mlp.down_proj.weight": [15.994064331054688], "linf-model.layers.0.mlp.down_proj.weight": [0.007800102233886719], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": "Gaborone"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [5.152, 1.134, 0.112, 0.043, 0.023, 0.025, 0.017, 0.014, 0.011, 0.009], "prob_new": [0.21288073062896729, 0.5668197274208069, 0.9032074809074402, 0.9595150947570801, 0.9773480892181396, 0.9753136038780212, 0.9836399555206299, 0.9866148233413696, 0.9889098405838013, 0.9910122752189636], "prob_old": [0.9558717608451843, 0.6768864393234253, 0.33672070503234863, 0.3258742690086365, 0.3202492892742157, 0.316528856754303, 0.31229931116104126, 0.3113143742084503, 0.31306466460227966, 0.315868079662323], "prob_new_token": [1.2327059266681317e-05, 0.05112292617559433, 0.7282205820083618, 0.8906395435333252, 0.9502972364425659, 0.9428419470787048, 0.9629600048065186, 0.9691867828369141, 0.9745198488235474, 0.9796492457389832], "prob_old_token": [0.8699713349342346, 0.04493895173072815, 0.00010880236368393525, 8.59401625348255e-05, 3.59490150003694e-05, 3.639192073023878e-05, 2.2046730009606108e-05, 1.832130510592833e-05, 1.6852996850502677e-05, 1.5632145732524805e-05], "l1-model.layers.0.mlp.down_proj.weight": [77411.1875], "l2-model.layers.0.mlp.down_proj.weight": [13.113142013549805], "linf-model.layers.0.mlp.down_proj.weight": [0.004499168135225773], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": "Dhaka"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [8.007, 4.665, 2.936, 2.386, 1.269, 0.544, 0.305, 0.138, 0.059, 0.032, 0.02, 0.012, 0.008], "prob_new": [0.0035749729722738266, 0.06310576945543289, 0.4106777012348175, 0.35863950848579407, 0.43332892656326294, 0.5998725295066833, 0.769270658493042, 0.8782714605331421, 0.9443707466125488, 0.9691161513328552, 0.9805951118469238, 0.9876105785369873, 0.9921557903289795], "prob_old": [0.9558717608451843, 0.8271965980529785, 0.4984970986843109, 0.42942413687705994, 0.4028564691543579, 0.337826669216156, 0.37222355604171753, 0.3951440453529358, 0.4008774757385254, 0.39210018515586853, 0.37928685545921326, 0.3689497411251068, 0.3619752824306488], "prob_new_token": [1.553952824906446e-05, 0.0007074333843775094, 0.0034458974841982126, 0.012011936865746975, 0.10355135798454285, 0.7524751424789429, 0.5501940846443176, 0.7631940841674805, 0.8943945169448853, 0.9442304968833923, 0.9679329991340637, 0.9822165966033936, 0.9907119870185852], "prob_old_token": [0.8699713349342346, 0.48519203066825867, 0.004053690005093813, 0.004198961891233921, 0.00523547176271677, 0.0006610547425225377, 0.0003297041403129697, 0.00020954752108082175, 0.00010531747830100358, 5.502882777363993e-05, 2.928873072960414e-05, 1.468035134166712e-05, 6.929587925696978e-06], "l1-model.layers.0.mlp.down_proj.weight": [82562.7734375], 
"l2-model.layers.0.mlp.down_proj.weight": [14.531648635864258], "linf-model.layers.0.mlp.down_proj.weight": [0.005788683891296387], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": "Juba"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [6.129, 3.364, 2.926, 1.018, 0.081, 0.057, 0.038, 0.03, 0.026, 0.023, 0.021, 0.02, 0.017, 0.013, 0.009], "prob_new": [0.2189430147409439, 0.4790489375591278, 0.4136691391468048, 0.5643295645713806, 0.9253503084182739, 0.9459894299507141, 0.9633426070213318, 0.9706681966781616, 0.9747954607009888, 0.9776807427406311, 0.9793665409088135, 0.9806164503097534, 0.9834024906158447, 0.9873120784759521, 0.9906521439552307], "prob_old": [0.773881196975708, 0.48037251830101013, 0.010571314953267574, 0.013511714525520802, 0.00020329089602455497, 0.00022371661907527596, 0.00012217041512485594, 7.517002086387947e-05, 4.810927202925086e-05, 3.316047877888195e-05, 2.7919635613216087e-05, 2.6837200493901037e-05, 2.3111584596335888e-05, 1.649429577810224e-05, 1.0755677976703737e-05], "prob_new_token": [1.0830311111931223e-05, 0.0012520442251116037, 0.003488154849037528, 0.13096633553504944, 0.8524335622787476, 0.8936945796012878, 0.9284182190895081, 0.9433623552322388, 0.9518892765045166, 0.9577213525772095, 0.9609497785568237, 0.9632004499435425, 0.9684943556785583, 0.9760652184486389, 0.9825420379638672], "prob_old_token": [0.773881196975708, 0.48037251830101013, 0.010571314953267574, 0.013511714525520802, 0.00020329089602455497, 0.00022371661907527596, 0.00012217041512485594, 7.517002086387947e-05, 4.810927202925086e-05, 3.316047877888195e-05, 2.7919635613216087e-05, 2.6837200493901037e-05, 2.3111584596335888e-05, 1.649429577810224e-05, 1.0755677976703737e-05], "l1-model.layers.0.mlp.down_proj.weight": [84311.1328125], "l2-model.layers.0.mlp.down_proj.weight": [15.000716209411621], "linf-model.layers.0.mlp.down_proj.weight": [0.006543752737343311], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "Romanian"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [3.649, 4.289, 1.343, 0.511, 0.125, 0.045, 0.029, 0.024, 0.021, 0.019, 0.018, 0.017, 0.016, 0.015, 0.013, 0.012, 0.01, 0.009], "prob_new": [0.02600996568799019, 0.01371787115931511, 0.26106566190719604, 0.6001626253128052, 0.8828462362289429, 0.9561032056808472, 0.971068263053894, 0.9764294624328613, 0.9792190194129944, 0.9811875224113464, 0.9826231598854065, 0.9835985898971558, 0.9844180345535278, 0.9853848218917847, 0.9866833090782166, 0.9882486462593079, 0.9898717999458313, 0.9913619160652161], "prob_old": [0.773881196975708, 0.02519325353205204, 0.17572276294231415, 0.13344727456569672, 0.020532485097646713, 0.001334255444817245, 0.0004712699737865478, 0.00030449143378064036, 0.00025858814478851855, 0.0002514227235224098, 0.0002692735579330474, 0.0002991590299643576, 0.00032150649349205196, 0.0003176232858095318, 0.00028115237364545465, 0.00022413369151763618, 0.0001653419603826478, 0.00011688406812027097], "prob_new_token": [0.02600996568799019, 0.01371787115931511, 0.26106566190719604, 0.6001626253128052, 0.8828462362289429, 0.9561032056808472, 0.971068263053894, 0.9764294624328613, 0.9792190194129944, 0.9811875224113464, 0.9826231598854065, 0.9835985898971558, 0.9844180345535278, 0.9853848218917847, 0.9866833090782166, 0.9882486462593079, 0.9898717999458313, 0.9913619160652161], "prob_old_token": [0.773881196975708, 0.02519325353205204, 0.17572276294231415, 
0.13344727456569672, 0.020532485097646713, 0.001334255444817245, 0.0004712699737865478, 0.00030449143378064036, 0.00025858814478851855, 0.0002514227235224098, 0.0002692735579330474, 0.0002991590299643576, 0.00032150649349205196, 0.0003176232858095318, 0.00028115237364545465, 0.00022413369151763618, 0.0001653419603826478, 0.00011688406812027097], "l1-model.layers.0.mlp.down_proj.weight": [98236.140625], "l2-model.layers.0.mlp.down_proj.weight": [16.964860916137695], "linf-model.layers.0.mlp.down_proj.weight": [0.008324358612298965], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "English"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [9.855, 6.186, 1.777, 1.145, 0.452, 0.326, 0.217, 0.158, 0.129, 0.112, 0.097, 0.083, 0.07, 0.06, 0.051, 0.043, 0.037, 0.031, 0.025, 0.021], "prob_new": [5.2486044296529144e-05, 0.002057695295661688, 0.16906332969665527, 0.31818079948425293, 0.6365962624549866, 0.7215242385864258, 0.804901123046875, 0.8534308671951294, 0.8793805241584778, 0.8943859338760376, 0.9073693156242371, 0.9202132821083069, 0.9321152567863464, 0.942042350769043, 0.9503157138824463, 0.9575526118278503, 0.9639948010444641, 0.9697385430335999, 0.9748845100402832, 0.9794573187828064], "prob_old": [0.773881196975708, 0.3079207241535187, 0.18208159506320953, 0.0002010167809203267, 0.00133598898537457, 0.0005933513748459518, 0.0001746747293509543, 5.766740650869906e-05, 2.8826732886955142e-05, 2.1338282749638893e-05, 1.9017090380657464e-05, 1.8075506886816584e-05, 1.745256122376304e-05, 1.6904006770346314e-05, 1.6292851796606556e-05, 1.5491823432967067e-05, 1.4478342563961633e-05, 1.3295035387272947e-05, 1.1989284757873975e-05, 1.062313276634086e-05], "prob_new_token": [5.2486044296529144e-05, 0.002057695295661688, 0.16906332969665527, 0.31818079948425293, 0.6365962624549866, 0.7215242385864258, 0.804901123046875, 0.8534308671951294, 0.8793805241584778, 0.8943859338760376, 0.9073693156242371, 0.9202132821083069, 0.9321152567863464, 0.942042350769043, 0.9503157138824463, 0.9575526118278503, 0.9639948010444641, 0.9697385430335999, 0.9748845100402832, 0.9794573187828064], "prob_old_token": [0.773881196975708, 0.3079207241535187, 0.18208159506320953, 0.0002010167809203267, 0.00133598898537457, 0.0005933513748459518, 0.0001746747293509543, 5.766740650869906e-05, 2.8826732886955142e-05, 2.1338282749638893e-05, 1.9017090380657464e-05, 1.8075506886816584e-05, 1.745256122376304e-05, 1.6904006770346314e-05, 1.6292851796606556e-05, 1.5491823432967067e-05, 1.4478342563961633e-05, 1.3295035387272947e-05, 1.1989284757873975e-05, 1.062313276634086e-05], "l1-model.layers.0.mlp.down_proj.weight": [95281.03125], "l2-model.layers.0.mlp.down_proj.weight": [16.946184158325195], "linf-model.layers.0.mlp.down_proj.weight": [0.008758483454585075], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "Japanese"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [3.309, 1.206, 0.672, 0.322, 0.249, 0.195, 0.135, 0.096, 0.071, 0.048, 0.035, 0.024, 0.015, 0.011, 0.007], "prob_new": [0.4706716537475586, 0.49783119559288025, 0.5815265774726868, 0.7487999796867371, 0.7938979864120483, 0.8315839171409607, 0.8777126669883728, 0.9103732109069824, 0.9327559471130371, 0.953371524810791, 0.9654971957206726, 0.9766149520874023, 0.9849554300308228, 0.9893949627876282, 0.9926765561103821], "prob_old": [0.9521257877349854, 0.6927645206451416, 0.6762049198150635, 
0.6700000762939453, 0.6648150682449341, 0.6688319444656372, 0.6883437633514404, 0.6858699321746826, 0.6829158067703247, 0.6777348518371582, 0.6776968240737915, 0.673444926738739, 0.6663300395011902, 0.6609702110290527, 0.6467798948287964], "prob_new_token": [0.027645083144307137, 0.10805024206638336, 0.17641617357730865, 0.4400084614753723, 0.5896551609039307, 0.6608022451400757, 0.7579612731933594, 0.8091532588005066, 0.8559242486953735, 0.8945450186729431, 0.9277374744415283, 0.9571148157119751, 0.976152241230011, 0.9839580059051514, 0.9887301325798035], "prob_old_token": [0.8340222239494324, 0.11274439841508865, 0.0006004847818985581, 0.000512146158143878, 0.00027490415959618986, 9.954241249943152e-05, 5.0522918172646314e-05, 3.118902168353088e-05, 1.8621381968841888e-05, 1.0839725291589275e-05, 7.308104613912292e-06, 4.653365976992063e-06, 2.6907559913524892e-06, 1.6382449530283338e-06, 1.046192323883588e-06], "l1-model.layers.0.mlp.down_proj.weight": [89366.1953125], "l2-model.layers.0.mlp.down_proj.weight": [15.690715789794922], "linf-model.layers.0.mlp.down_proj.weight": [0.00704924575984478], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the Sci-Fi Channel"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [3.351, 1.857, 2.809, 1.047, 0.609, 0.444, 0.321, 0.21, 0.142, 0.104, 0.067, 0.046, 0.035, 0.028, 0.022, 0.018, 0.014, 0.011, 0.009], "prob_new": [0.20045112073421478, 0.27217015624046326, 0.2828768491744995, 0.4211435914039612, 0.6014436483383179, 0.6812130808830261, 0.7441126704216003, 0.8177857398986816, 0.870762050151825, 0.9032020568847656, 0.9365574717521667, 0.9554075002670288, 0.9660589098930359, 0.9728634357452393, 0.9781837463378906, 0.9825963377952576, 0.9861077070236206, 0.9888018369674683, 0.9908377528190613], "prob_old": [0.9521257877349854, 0.7241578102111816, 0.618411123752594, 0.7097449898719788, 0.7389918565750122, 0.7479047775268555, 0.7474654912948608, 0.7403936386108398, 0.7367572784423828, 0.736333966255188, 0.7355900406837463, 0.735171377658844, 0.735268771648407, 0.7358031272888184, 0.7364577054977417, 0.7371079325675964, 0.7378062605857849, 0.7386117577552795, 0.7395167946815491], "prob_new_token": [0.02764512225985527, 0.1299847960472107, 0.26481834053993225, 0.1497463434934616, 0.2769339084625244, 0.38277551531791687, 0.5227435827255249, 0.6661564111709595, 0.7702860236167908, 0.817903995513916, 0.8781576752662659, 0.912132740020752, 0.9315292239189148, 0.944762110710144, 0.9562981724739075, 0.9664021730422974, 0.9744005799293518, 0.9802930355072021, 0.9845142364501953], "prob_old_token": [0.8340222239494324, 0.098426952958107, 0.0008724355138838291, 0.013289567083120346, 0.036829955875873566, 0.05774494633078575, 0.05560414865612984, 0.027812084183096886, 0.009006885811686516, 0.0034700571559369564, 0.0013942483346909285, 0.0007085551624186337, 0.0004999469965696335, 0.0004136092320550233, 0.0003294451453257352, 0.00024418742395937443, 0.00017296352598350495, 0.00012076689017703757, 8.578113920520991e-05], "l1-model.layers.0.mlp.down_proj.weight": [95405.4765625], "l2-model.layers.0.mlp.down_proj.weight": [17.03182029724121], "linf-model.layers.0.mlp.down_proj.weight": [0.00853394903242588], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the USA Network"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [4.551, 1.586, 0.523, 0.247, 0.374, 0.162, 0.166, 0.149, 0.124, 0.102, 0.084, 0.07, 0.059, 0.05, 0.041, 
0.034, 0.027, 0.022, 0.018, 0.015], "prob_new": [0.03765115141868591, 0.33751505613327026, 0.6521351337432861, 0.8079342246055603, 0.7200530767440796, 0.8609568476676941, 0.8583707213401794, 0.8706790804862976, 0.8891074061393738, 0.9065391421318054, 0.92144775390625, 0.9336873888969421, 0.9435622096061707, 0.9521991014480591, 0.9601671695709229, 0.9672712087631226, 0.9733380079269409, 0.9783675074577332, 0.9824156165122986, 0.985634982585907], "prob_old": [0.9521257877349854, 0.7113886475563049, 0.6724390983581543, 0.7281210422515869, 0.7456400394439697, 0.7072224617004395, 0.6905561685562134, 0.682669460773468, 0.6787258386611938, 0.6767504215240479, 0.6756362915039062, 0.6748611927032471, 0.67491614818573, 0.6767354011535645, 0.6801424026489258, 0.6837211847305298, 0.6860932111740112, 0.6868149042129517, 0.686145544052124, 0.6845825910568237], "prob_new_token": [0.02764512225985527, 0.12816354632377625, 0.3324413001537323, 0.5394455194473267, 0.6978197693824768, 0.6891205906867981, 0.67535799741745, 0.705837070941925, 0.7520647048950195, 0.7964610457420349, 0.8357717394828796, 0.8692266345024109, 0.8958595991134644, 0.916436493396759, 0.9322075843811035, 0.9443903565406799, 0.9542404413223267, 0.9624514579772949, 0.9691545963287354, 0.9745215177536011], "prob_old_token": [0.8340222239494324, 0.1305101215839386, 0.019664917141199112, 0.016079341992735863, 0.0916360542178154, 0.0018572822446003556, 0.0004511324514169246, 0.00027606167714111507, 0.00023800478084012866, 0.0002419786324026063, 0.0002672078553587198, 0.0003122856141999364, 0.00037932448321953416, 0.00044860009802505374, 0.00048154324758797884, 0.00046230002772063017, 0.0004142193356528878, 0.00036207702942192554, 0.0003153006255161017, 0.00027484280872158706], "l1-model.layers.0.mlp.down_proj.weight": [99407.2578125], "l2-model.layers.0.mlp.down_proj.weight": [17.64164924621582], "linf-model.layers.0.mlp.down_proj.weight": [0.009477846324443817], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the CW"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [11.977, 2.843, 0.195, 0.048, 0.022, 0.014, 0.01], "prob_new": [6.290205874392996e-06, 0.05822571739554405, 0.8225728273391724, 0.9532819986343384, 0.9779837727546692, 0.9864766597747803, 0.9901980757713318], "prob_old": [0.7823527455329895, 0.028118308633565903, 0.001168584916740656, 0.00011280945909675211, 3.5880271752830595e-05, 2.0511826733127236e-05, 1.546124076412525e-05], "prob_new_token": [6.290205874392996e-06, 0.05822571739554405, 0.8225728273391724, 0.9532819986343384, 0.9779837727546692, 0.9864766597747803, 0.9901980757713318], "prob_old_token": [0.7823527455329895, 0.028118308633565903, 0.001168584916740656, 0.00011280945909675211, 3.5880271752830595e-05, 2.0511826733127236e-05, 1.546124076412525e-05], "l1-model.layers.0.mlp.down_proj.weight": [55067.38671875], "l2-model.layers.0.mlp.down_proj.weight": [9.731605529785156], "linf-model.layers.0.mlp.down_proj.weight": [0.002971009351313114], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Italy"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [11.487, 2.819, 0.089, 0.076, 0.058, 0.023, 0.013, 0.01, 0.009], "prob_new": [1.0261817806167528e-05, 0.059672679752111435, 0.9146718382835388, 0.9271416664123535, 0.9438216686248779, 0.9772124290466309, 0.9869994521141052, 0.9897655248641968, 0.9910703301429749], "prob_old": [0.7823527455329895, 0.018008515238761902, 
0.0008743791840970516, 0.0004399104218464345, 0.0004372836265247315, 0.0002078170800814405, 0.00012859786511398852, 0.00010275758540956303, 9.083333861781284e-05], "prob_new_token": [1.0261817806167528e-05, 0.059672679752111435, 0.9146718382835388, 0.9271416664123535, 0.9438216686248779, 0.9772124290466309, 0.9869994521141052, 0.9897655248641968, 0.9910703301429749], "prob_old_token": [0.7823527455329895, 0.018008515238761902, 0.0008743791840970516, 0.0004399104218464345, 0.0004372836265247315, 0.0002078170800814405, 0.00012859786511398852, 0.00010275758540956303, 9.083333861781284e-05], "l1-model.layers.0.mlp.down_proj.weight": [68989.984375], "l2-model.layers.0.mlp.down_proj.weight": [11.908719062805176], "linf-model.layers.0.mlp.down_proj.weight": [0.0038974545896053314], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Spain"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [9.955, 2.162, 1.548, 0.218, 0.12, 0.09, 0.073, 0.061, 0.053, 0.047, 0.043, 0.04, 0.037, 0.033, 0.03, 0.027, 0.024, 0.022, 0.019, 0.017], "prob_new": [4.750975494971499e-05, 0.1150735467672348, 0.21257594227790833, 0.8040497899055481, 0.886684000492096, 0.9140498042106628, 0.9295873045921326, 0.9404017329216003, 0.9481999278068542, 0.9537726640701294, 0.9577630758285522, 0.960962176322937, 0.964008092880249, 0.96718430519104, 0.9703899621963501, 0.9733861684799194, 0.9761102199554443, 0.9786038994789124, 0.9808969497680664, 0.9829931259155273], "prob_old": [0.7823527455329895, 0.13974691927433014, 0.007630697451531887, 0.005203991197049618, 0.002304163295775652, 0.001852237619459629, 0.001792206079699099, 0.001740476000122726, 0.0015778410015627742, 0.0013190681347623467, 0.001049030339345336, 0.0008237495203502476, 0.0006611880962736905, 0.00055371766211465, 0.0004844448994845152, 0.00043935191933996975, 0.0004097417404409498, 0.0003897183632943779, 0.000375445670215413, 0.0003645468968898058], "prob_new_token": [4.750975494971499e-05, 0.1150735467672348, 0.21257594227790833, 0.8040497899055481, 0.886684000492096, 0.9140498042106628, 0.9295873045921326, 0.9404017329216003, 0.9481999278068542, 0.9537726640701294, 0.9577630758285522, 0.960962176322937, 0.964008092880249, 0.96718430519104, 0.9703899621963501, 0.9733861684799194, 0.9761102199554443, 0.9786038994789124, 0.9808969497680664, 0.9829931259155273], "prob_old_token": [0.7823527455329895, 0.13974691927433014, 0.007630697451531887, 0.005203991197049618, 0.002304163295775652, 0.001852237619459629, 0.001792206079699099, 0.001740476000122726, 0.0015778410015627742, 0.0013190681347623467, 0.001049030339345336, 0.0008237495203502476, 0.0006611880962736905, 0.00055371766211465, 0.0004844448994845152, 0.00043935191933996975, 0.0004097417404409498, 0.0003897183632943779, 0.000375445670215413, 0.0003645468968898058], "l1-model.layers.0.mlp.down_proj.weight": [96446.09375], "l2-model.layers.0.mlp.down_proj.weight": [16.94280242919922], "linf-model.layers.0.mlp.down_proj.weight": [0.00946854054927826], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Japan"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [5.523, 4.065, 1.934, 2.414, 0.838, 0.365, 0.147, 0.056, 0.026, 0.013, 0.008], "prob_new": [0.4938949942588806, 0.40385785698890686, 0.5075593590736389, 0.503754734992981, 0.5932492017745972, 0.7406577467918396, 0.8725499510765076, 0.9466592073440552, 0.9746642112731934, 0.9876212477684021, 0.9920834302902222], 
"prob_old": [0.9293187856674194, 0.6088732481002808, 0.6813596487045288, 0.7063093185424805, 0.7191619277000427, 0.7266954183578491, 0.7303719520568848, 0.7311732769012451, 0.7297708988189697, 0.7305158376693726, 0.7281262874603271], "prob_new_token": [1.6136593330884352e-05, 0.00036495961830951273, 0.02100643515586853, 0.008001517504453659, 0.18720829486846924, 0.48211556673049927, 0.7459843754768372, 0.894208550453186, 0.9502344131469727, 0.9761523008346558, 0.9850987195968628], "prob_old_token": [0.7632028460502625, 0.0402299202978611, 0.028552258387207985, 0.00026532370247878134, 0.001050746999680996, 0.001159356557764113, 0.001634285319596529, 0.0007547340355813503, 0.0002675694413483143, 9.727333963382989e-05, 3.8904974644538015e-05], "l1-model.layers.0.mlp.down_proj.weight": [70913.46875], "l2-model.layers.0.mlp.down_proj.weight": [12.745726585388184], "linf-model.layers.0.mlp.down_proj.weight": [0.004877978935837746], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Dubai"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [9.34, 7.578, 6.425, 5.524, 4.728, 3.437, 2.235, 0.998, 0.394, 0.217, 0.067, 0.047, 0.027, 0.013, 0.008], "prob_new": [0.0007351022795774043, 0.003111169906333089, 0.002700229873880744, 0.0060632601380348206, 0.019467420876026154, 0.2808956503868103, 0.4931572377681732, 0.5589972734451294, 0.7161292433738708, 0.8165453672409058, 0.9354326725006104, 0.9543629884719849, 0.9734433889389038, 0.9866846203804016, 0.9924941062927246], "prob_old": [0.9293187856674194, 0.6670454144477844, 0.5781362056732178, 0.6007652878761292, 0.5699019432067871, 0.5042084455490112, 0.4949423670768738, 0.5303184390068054, 0.5495070219039917, 0.5823357701301575, 0.5357754230499268, 0.5233723521232605, 0.5025556087493896, 0.4854006767272949, 0.4765271544456482], "prob_new_token": [0.0014649422373622656, 0.006179963704198599, 0.004859622102230787, 0.01062782108783722, 0.0021255803294479847, 0.0018463128944858909, 0.011742954142391682, 0.13880103826522827, 0.4745064973831177, 0.6788994669914246, 0.9049422144889832, 0.9410563111305237, 0.9638406038284302, 0.9791007041931152, 0.9869417548179626], "prob_old_token": [0.7632028460502625, 0.03946629911661148, 0.004510832019150257, 0.00853071641176939, 9.294057235820219e-05, 3.6733737942995504e-05, 3.489911250653677e-05, 6.172223220346496e-05, 2.0594818124664016e-05, 1.8225962776341476e-05, 5.107484639665927e-07, 2.120706454888932e-07, 1.2160876394773368e-07, 7.109210287126189e-08, 4.857056623563949e-08], "l1-model.layers.0.mlp.down_proj.weight": [87991.8125], "l2-model.layers.0.mlp.down_proj.weight": [15.741693496704102], "linf-model.layers.0.mlp.down_proj.weight": [0.006744851358234882], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Jena"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [7.946, 5.626, 2.551, 1.433, 0.538, 0.1, 0.017, 0.013, 0.012, 0.012, 0.011, 0.009], "prob_new": [0.4137546718120575, 0.33481645584106445, 0.4218161702156067, 0.25184565782546997, 0.6612471342086792, 0.908151388168335, 0.9830981492996216, 0.9874600172042847, 0.9880618453025818, 0.9885151386260986, 0.9895613193511963, 0.9912354350090027], "prob_old": [0.9293187856674194, 0.5953702330589294, 0.6508386135101318, 0.4757899343967438, 0.50113445520401, 0.5050745606422424, 0.5021057724952698, 0.5012425780296326, 0.49971094727516174, 0.49791041016578674, 0.49590617418289185, 
0.49394333362579346], "prob_new_token": [1.5147047349728382e-07, 1.937246270244941e-05, 0.007270531263202429, 0.33267199993133545, 0.3512527048587799, 0.8299943208694458, 0.9728128910064697, 0.979507565498352, 0.979444146156311, 0.9794920682907104, 0.9809683561325073, 0.9838743805885315], "prob_old_token": [0.7632028460502625, 0.02781888097524643, 0.0012041537556797266, 0.0006806311430409551, 0.000814133498352021, 0.0003450975054875016, 4.377864024718292e-05, 3.059854861930944e-05, 3.3115004043793306e-05, 3.830911373370327e-05, 4.16676739405375e-05, 4.075943070347421e-05], "l1-model.layers.0.mlp.down_proj.weight": [75246.78125], "l2-model.layers.0.mlp.down_proj.weight": [13.471625328063965], "linf-model.layers.0.mlp.down_proj.weight": [0.005074547603726387], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Bremen"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [3.487, 2.506, 2.077, 1.666, 1.4, 1.174, 0.863, 0.565, 0.205, 0.02, 0.013, 0.01], "prob_new": [0.42314139008522034, 0.45732957124710083, 0.5644364356994629, 0.6734758615493774, 0.7408850789070129, 0.770890474319458, 0.778607964515686, 0.7308055758476257, 0.8655826449394226, 0.9807245135307312, 0.9870836138725281, 0.9901138544082642], "prob_old": [0.8802522420883179, 0.5753676295280457, 0.5241140723228455, 0.493631511926651, 0.43675142526626587, 0.4396483600139618, 0.2657616138458252, 0.24820977449417114, 0.24781100451946259, 0.2505163550376892, 0.25266531109809875, 0.2536998391151428], "prob_new_token": [6.021196440997301e-06, 0.0001868028484750539, 0.00020234216935932636, 0.0005331069696694613, 0.0012636566534638405, 0.0033039171248674393, 0.015185986645519733, 0.10618959367275238, 0.3776971101760864, 0.9485265016555786, 0.9809190034866333, 0.9874285459518433], "prob_old_token": [0.6327256560325623, 0.002315027639269829, 0.011393781751394272, 0.007940566167235374, 0.004868312273174524, 0.006450059358030558, 0.00875084474682808, 0.007283540442585945, 0.0074530900456011295, 0.00038352105184458196, 0.0003383299626875669, 0.000427675258833915], "l1-model.layers.0.mlp.down_proj.weight": [84493.1875], "l2-model.layers.0.mlp.down_proj.weight": [14.602341651916504], "linf-model.layers.0.mlp.down_proj.weight": [0.00544775789603591], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "Judd Apatow"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [7.033, 5.736, 4.495, 4.186, 3.182, 2.333, 1.232, 0.139, 0.041, 0.025, 0.016, 0.011, 0.01], "prob_new": [0.2846667468547821, 0.30434513092041016, 0.3177492022514343, 0.3284779489040375, 0.33966338634490967, 0.3564755916595459, 0.439285010099411, 0.8749462962150574, 0.9603108763694763, 0.9760677218437195, 0.9845070838928223, 0.988628625869751, 0.9904693365097046], "prob_old": [0.8802522420883179, 0.5704739093780518, 0.504372775554657, 0.5204373598098755, 0.5458739995956421, 0.5399067997932434, 0.5322529077529907, 0.534912645816803, 0.432431161403656, 0.35639116168022156, 0.3298260569572449, 0.3175920248031616, 0.310703843832016], "prob_new_token": [0.00011093316425103694, 0.0001431057316949591, 0.004967433400452137, 0.005550127476453781, 0.02738814987242222, 0.060797788202762604, 0.12579229474067688, 0.780534029006958, 0.9024230241775513, 0.939162015914917, 0.9625117778778076, 0.9744605422019958, 0.9800634980201721], "prob_old_token": [0.6327256560325623, 0.00041886366670951247, 0.006268232595175505, 0.009260455146431923, 
0.01830473355948925, 0.011237744241952896, 0.004847605712711811, 0.0018029685597866774, 0.001722239889204502, 0.001147428760305047, 0.0005374042666517198, 0.0002867558505386114, 0.00020456239872146398], "l1-model.layers.0.mlp.down_proj.weight": [81529.09375], "l2-model.layers.0.mlp.down_proj.weight": [14.521800994873047], "linf-model.layers.0.mlp.down_proj.weight": [0.005769913084805012], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "George Friedman"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [6.315, 6.984, 4.966, 4.466, 3.705, 1.827, 0.718, 0.112, 0.081, 0.048, 0.022, 0.012, 0.008], "prob_new": [0.1324782520532608, 0.1812172830104828, 0.16678068041801453, 0.2563568949699402, 0.33501896262168884, 0.4111352860927582, 0.6862074136734009, 0.8989652395248413, 0.9238297343254089, 0.9537741541862488, 0.9781523942947388, 0.9881240129470825, 0.9920819997787476], "prob_old": [0.8802522420883179, 0.5756543278694153, 0.46181848645210266, 0.524768590927124, 0.42982059717178345, 0.29550689458847046, 0.2670780122280121, 0.27146992087364197, 0.26758280396461487, 0.2680930495262146, 0.2721920907497406, 0.27707937359809875, 0.28046485781669617], "prob_new_token": [0.00022606723359785974, 1.681441062828526e-06, 0.0009073485853150487, 0.0012508454965427518, 0.003951089456677437, 0.01904897391796112, 0.12428011745214462, 0.7695983052253723, 0.9232045412063599, 0.964752197265625, 0.9812199473381042, 0.9885669946670532, 0.9920884370803833], "prob_old_token": [0.6327256560325623, 3.1848816433921456e-05, 0.006048407405614853, 0.02416340261697769, 0.04547468200325966, 0.017179911956191063, 0.004755726549774408, 0.0003240249934606254, 4.1865358070936054e-05, 1.0658793144102674e-05, 3.7816409985680366e-06, 1.8209673271485372e-06, 1.1480437933641952e-06], "l1-model.layers.0.mlp.down_proj.weight": [81458.25], "l2-model.layers.0.mlp.down_proj.weight": [14.504332542419434], "linf-model.layers.0.mlp.down_proj.weight": [0.005730777978897095], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "Marc Mayer"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [3.563, 1.809, 2.753, 1.385, 1.248, 0.141, 0.105, 0.048, 0.027, 0.016, 0.01], "prob_new": [0.5048718452453613, 0.6294379830360413, 0.657774806022644, 0.6912938356399536, 0.718141496181488, 0.8817874193191528, 0.9057195782661438, 0.9534392356872559, 0.9732292890548706, 0.9843295812606812, 0.9901081323623657], "prob_old": [0.714084267616272, 0.5420195460319519, 0.4838021695613861, 0.4900427460670471, 0.45206746459007263, 0.34260499477386475, 0.2981595993041992, 0.2853107750415802, 0.2648822069168091, 0.2552216351032257, 0.25240617990493774], "prob_new_token": [4.025532234663842e-06, 0.0013295974349603057, 2.5745242965058424e-05, 0.005116533953696489, 0.007827329449355602, 0.6407949328422546, 0.7458820939064026, 0.89939945936203, 0.9482116103172302, 0.9715016484260559, 0.9818117022514343], "prob_old_token": [0.6126298904418945, 0.21355082094669342, 0.003342708107084036, 0.036764953285455704, 0.044427353888750076, 0.010108930990099907, 0.0021548806689679623, 0.000341453735018149, 9.652426524553448e-05, 2.8420034141163342e-05, 8.909841199056245e-06], "l1-model.layers.0.mlp.down_proj.weight": [69239.03125], "l2-model.layers.0.mlp.down_proj.weight": [12.469256401062012], "linf-model.layers.0.mlp.down_proj.weight": [0.0049841273576021194], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab 
Corporation", "target_new": {"str": "Masayoshi Son"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [5.602, 4.509, 3.799, 2.942, 2.448, 1.41, 0.725, 0.326, 0.068, 0.031, 0.024, 0.018, 0.015, 0.013, 0.011, 0.01], "prob_new": [0.20161043107509613, 0.24631479382514954, 0.23925678431987762, 0.30816900730133057, 0.3749079406261444, 0.49913278222084045, 0.6896283030509949, 0.8023363351821899, 0.9359121322631836, 0.9692584276199341, 0.9767227172851562, 0.98188316822052, 0.9853873252868652, 0.9873123168945312, 0.9888791441917419, 0.9905533194541931], "prob_old": [0.714084267616272, 0.4765166938304901, 0.519935667514801, 0.5160713791847229, 0.45976904034614563, 0.40782469511032104, 0.35032418370246887, 0.30131497979164124, 0.27526092529296875, 0.2686800956726074, 0.2675072252750397, 0.264970064163208, 0.262927383184433, 0.2614670693874359, 0.26020127534866333, 0.2588541805744171], "prob_new_token": [9.207190487359185e-06, 2.1012494471506216e-05, 0.0003366103919688612, 0.0011454317718744278, 0.001961056375876069, 0.008560290560126305, 0.05870840325951576, 0.2535552978515625, 0.8419502377510071, 0.9589512348175049, 0.977705180644989, 0.9871519804000854, 0.9910058379173279, 0.9915120005607605, 0.9894804358482361, 0.9865506887435913], "prob_old_token": [0.6126298904418945, 0.027577916160225868, 0.4894362688064575, 0.2589918375015259, 0.21358053386211395, 0.19643118977546692, 0.034127820283174515, 0.04375845938920975, 0.004399196244776249, 0.0008953855140134692, 0.0005048654857091606, 0.0003941951144952327, 0.00038861329085193574, 0.0004372252442408353, 0.0004412252746988088, 0.0003478869330137968], "l1-model.layers.0.mlp.down_proj.weight": [89455.21875], "l2-model.layers.0.mlp.down_proj.weight": [15.93997859954834], "linf-model.layers.0.mlp.down_proj.weight": [0.007003376260399818], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab Corporation", "target_new": {"str": "Riccardo Muti"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [4.487, 2.789, 2.142, 1.318, 0.388, 0.045, 0.016, 0.011, 0.009], "prob_new": [0.5412984490394592, 0.6011533737182617, 0.726477324962616, 0.7959735989570618, 0.8255288004875183, 0.9585413336753845, 0.9841572046279907, 0.9888872504234314, 0.9907600283622742], "prob_old": [0.714084267616272, 0.4131997227668762, 0.33001285791397095, 0.2580166757106781, 0.24667572975158691, 0.24213530123233795, 0.23656558990478516, 0.22716636955738068, 0.19912368059158325], "prob_new_token": [4.529347563675401e-08, 1.0842775736819021e-05, 3.449962969170883e-05, 0.0014055180363357067, 0.14618676900863647, 0.8234054446220398, 0.971299409866333, 0.9872342944145203, 0.9859817028045654], "prob_old_token": [0.6126298904418945, 0.009827641770243645, 0.034070033580064774, 0.012220152653753757, 0.0022528620902448893, 0.0001059475980582647, 6.52099197395728e-06, 1.7403422134520952e-06, 1.1419037946325261e-06], "l1-model.layers.0.mlp.down_proj.weight": [69420.109375], "l2-model.layers.0.mlp.down_proj.weight": [12.144264221191406], "linf-model.layers.0.mlp.down_proj.weight": [0.0039988923817873], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab Corporation", "target_new": {"str": "Giorgio Armani"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [3.441, 3.967, 0.601, 0.137, 0.067, 0.038, 0.024, 0.019, 0.02, 0.023, 0.019, 0.014, 0.011, 0.009], "prob_new": [0.47477248311042786, 0.1899285614490509, 0.6268519163131714, 0.8731206655502319, 0.9350327253341675, 
0.9632847309112549, 0.9763709306716919, 0.9812202453613281, 0.9806032180786133, 0.9770766496658325, 0.981019139289856, 0.9862807393074036, 0.9893362522125244, 0.9910281300544739], "prob_old": [0.9123725891113281, 0.6948533058166504, 0.6718713641166687, 0.6830329895019531, 0.6942082643508911, 0.6906954050064087, 0.6823386549949646, 0.6736358404159546, 0.6728540658950806, 0.6761957406997681, 0.6749984622001648, 0.6726014018058777, 0.6719841361045837, 0.6727003455162048], "prob_new_token": [0.0010821707546710968, 0.0009454274550080299, 0.3227885663509369, 0.8299365043640137, 0.952540397644043, 0.9849759340286255, 0.9894167184829712, 0.9898439645767212, 0.985903263092041, 0.9798676371574402, 0.985921323299408, 0.992832362651825, 0.9958061575889587, 0.9970607757568359], "prob_old_token": [0.6529882550239563, 8.147436165018007e-05, 0.0004718411073554307, 5.840696394443512e-05, 1.996066748688463e-05, 6.600861524930224e-06, 4.006577455584193e-06, 3.210943305020919e-06, 3.853547241305932e-06, 4.973628620064119e-06, 3.0933663310861448e-06, 1.3086511216897634e-06, 5.866911578777945e-07, 2.9895608122387785e-07], "l1-model.layers.0.mlp.down_proj.weight": [86357.09375], "l2-model.layers.0.mlp.down_proj.weight": [14.820783615112305], "linf-model.layers.0.mlp.down_proj.weight": [0.0063723111525177956], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. Hoffman", "target_new": {"str": "Columbia University"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [1.192, 1.412, 1.018, 0.872, 0.179, 0.024, 0.018, 0.015, 0.012, 0.01, 0.01, 0.007], "prob_new": [0.7895854115486145, 0.6744512915611267, 0.7701718807220459, 0.8208377957344055, 0.8894317150115967, 0.9769330024719238, 0.9822840690612793, 0.9853230714797974, 0.9885150194168091, 0.9899674654006958, 0.9897705316543579, 0.9928151965141296], "prob_old": [0.9123725891113281, 0.640607476234436, 0.6254031658172607, 0.6279128789901733, 0.6343117952346802, 0.6299423575401306, 0.6260600090026855, 0.6252627372741699, 0.6253006458282471, 0.6253128051757812, 0.6255002021789551, 0.6261390447616577], "prob_new_token": [0.00041883750236593187, 0.0009032575180754066, 0.0017234401311725378, 0.0029406645335257053, 0.3117162883281708, 0.8958541750907898, 0.932366132736206, 0.9489496350288391, 0.9653029441833496, 0.9717260003089905, 0.9686045050621033, 0.981029212474823], "prob_old_token": [0.6529882550239563, 0.00028625063714571297, 0.0004845796211156994, 0.00028713210485875607, 0.00017127496539615095, 1.0247549653286114e-05, 2.4252153707493562e-06, 7.490617122130061e-07, 2.3134337823194073e-07, 1.0741847944473193e-07, 1.061530525703347e-07, 7.442264404744492e-08], "l1-model.layers.0.mlp.down_proj.weight": [81944.078125], "l2-model.layers.0.mlp.down_proj.weight": [14.208314895629883], "linf-model.layers.0.mlp.down_proj.weight": [0.005566575564444065], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. 
Hoffman", "target_new": {"str": "Carnegie Mellon University"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [4.146, 1.803, 1.125, 0.488, 0.495, 0.159, 0.082, 0.035, 0.017, 0.011, 0.008], "prob_new": [0.48203378915786743, 0.4618721902370453, 0.5785267949104309, 0.6728894114494324, 0.6880788803100586, 0.8656055331230164, 0.9251219034194946, 0.9662186503410339, 0.9831156134605408, 0.9893503189086914, 0.9918786883354187], "prob_old": [0.9123725891113281, 0.6887499094009399, 0.6656292676925659, 0.704617977142334, 0.7912547588348389, 0.7858603000640869, 0.7385767698287964, 0.7064029574394226, 0.691044807434082, 0.6834251284599304, 0.678229033946991], "prob_new_token": [0.20117510855197906, 0.019396629184484482, 0.5119395852088928, 0.6108075380325317, 0.792267918586731, 0.7567747235298157, 0.832115113735199, 0.9277514219284058, 0.9630927443504333, 0.9751931428909302, 0.9810684323310852], "prob_old_token": [0.6529882550239563, 0.00016168950241990387, 0.0005908721359446645, 3.6008750612381846e-05, 3.0922765290597454e-05, 4.9846756155602634e-05, 1.2746593711199239e-05, 1.6855110516189598e-06, 8.36393724057416e-07, 7.48796708194277e-07, 7.47694912206498e-07], "l1-model.layers.0.mlp.down_proj.weight": [78355.3125], "l2-model.layers.0.mlp.down_proj.weight": [13.44400405883789], "linf-model.layers.0.mlp.down_proj.weight": [0.004872571676969528], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. Hoffman", "target_new": {"str": "the University of Cape Town"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [2.123, 2.16, 0.32, 0.067, 0.045, 0.034, 0.025, 0.014, 0.01], "prob_new": [0.6134995818138123, 0.5518943667411804, 0.7676636576652527, 0.9361720085144043, 0.9565685987472534, 0.9672629833221436, 0.9751042127609253, 0.985956072807312, 0.9904025793075562], "prob_old": [0.8484284281730652, 0.5331558585166931, 0.5242986083030701, 0.425898939371109, 0.42231303453445435, 0.4443473517894745, 0.46426621079444885, 0.4487902820110321, 0.4416228234767914], "prob_new_token": [0.0020436712075024843, 0.0022462052293121815, 0.4477477967739105, 0.9359568953514099, 0.962114691734314, 0.9610097408294678, 0.9617502093315125, 0.9800410866737366, 0.9860303401947021], "prob_old_token": [0.7124742865562439, 0.11535558104515076, 0.0008002895046956837, 0.006359706167131662, 0.005213998723775148, 0.0020770118571817875, 0.000915643060579896, 0.00036794660263694823, 0.0002808905264828354], "l1-model.layers.0.mlp.down_proj.weight": [67410.0703125], "l2-model.layers.0.mlp.down_proj.weight": [11.757762908935547], "linf-model.layers.0.mlp.down_proj.weight": [0.003986178897321224], "request": {"prompt": "{} holds a position at", "subject": "J. M. 
Coetzee", "target_new": {"str": "Stanford University"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [1.439, 0.659, 0.129, 0.289, 0.045, 0.025, 0.035, 0.01, 0.009], "prob_new": [0.7682777643203735, 0.8169063925743103, 0.9008538722991943, 0.8595932722091675, 0.9592375755310059, 0.9760563969612122, 0.9684902429580688, 0.9897423386573792, 0.9914592504501343], "prob_old": [0.8484284281730652, 0.49383336305618286, 0.47452449798583984, 0.5012981295585632, 0.502116858959198, 0.41592952609062195, 0.4267035126686096, 0.39979201555252075, 0.3915999233722687], "prob_new_token": [9.221502114087343e-05, 0.01357858907431364, 0.4923403859138489, 0.1520370990037918, 0.7884353399276733, 0.8641921877861023, 0.8059018850326538, 0.960176944732666, 0.9715096354484558], "prob_old_token": [0.7124742865562439, 0.0511404350399971, 0.07217083126306534, 0.11268479377031326, 0.005278333555907011, 0.011709645390510559, 0.009553379379212856, 0.0034283935092389584, 0.002685174345970154], "l1-model.layers.0.mlp.down_proj.weight": [63749.86328125], "l2-model.layers.0.mlp.down_proj.weight": [11.359134674072266], "linf-model.layers.0.mlp.down_proj.weight": [0.003963883966207504], "request": {"prompt": "{} holds a position at", "subject": "J. M. Coetzee", "target_new": {"str": "Carnegie Mellon University"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [2.52, 1.463, 1.003, 0.845, 0.357, 0.133, 0.027, 0.008], "prob_new": [0.6281738877296448, 0.7969648241996765, 0.7849919199943542, 0.7392575144767761, 0.8140058517456055, 0.9030744433403015, 0.9742165803909302, 0.9922699332237244], "prob_old": [0.8484284281730652, 0.5228126645088196, 0.44525909423828125, 0.4463758170604706, 0.427191823720932, 0.4629434645175934, 0.4298490583896637, 0.40853896737098694], "prob_new_token": [6.811330877098953e-06, 0.00019493437139317393, 0.0033526034094393253, 0.012523816898465157, 0.15687473118305206, 0.4770585596561432, 0.8752774596214294, 0.9753376841545105], "prob_old_token": [0.7124742865562439, 0.04417411610484123, 0.042398836463689804, 0.1678672432899475, 0.04181313142180443, 0.0049093011766672134, 0.0015469008358195424, 0.00035166091402061284], "l1-model.layers.0.mlp.down_proj.weight": [65004.8828125], "l2-model.layers.0.mlp.down_proj.weight": [11.116439819335938], "linf-model.layers.0.mlp.down_proj.weight": [0.003500659018754959], "request": {"prompt": "{} holds a position at", "subject": "J. M. 
Coetzee", "target_new": {"str": "Lawrence Berkeley National Laboratory"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [2.688, 2.905, 2.553, 2.422, 2.152, 2.009, 1.862, 1.76, 1.679, 1.541, 1.399, 1.225, 0.898, 0.399, 0.098, 0.042, 0.03, 0.022, 0.017, 0.013], "prob_new": [0.4794250428676605, 0.5245252251625061, 0.48607489466667175, 0.7254144549369812, 0.7353494763374329, 0.7434974908828735, 0.7474964261054993, 0.7454177737236023, 0.7421398162841797, 0.7486811876296997, 0.7556281089782715, 0.758793294429779, 0.7648792266845703, 0.7957763671875, 0.9136838316917419, 0.9595609903335571, 0.9713907241821289, 0.9786121249198914, 0.9837097525596619, 0.9873356223106384], "prob_old": [0.8382276892662048, 0.3076012134552002, 0.2690315246582031, 0.32960668206214905, 0.3270739018917084, 0.32474666833877563, 0.32391974329948425, 0.3200775682926178, 0.3100237250328064, 0.29735037684440613, 0.28780561685562134, 0.2843574285507202, 0.2880883514881134, 0.2920030355453491, 0.2937876582145691, 0.2991895079612732, 0.30241280794143677, 0.30370479822158813, 0.3041057288646698, 0.3042241930961609], "prob_new_token": [2.253292768727988e-05, 3.2296942663379014e-05, 5.487710586749017e-05, 8.574658750148956e-06, 3.077161454712041e-05, 5.946279634372331e-05, 0.00012056573177687824, 0.00020407895499374717, 0.00031111176940612495, 0.0005965617601759732, 0.0011657121358439326, 0.0027302573435008526, 0.013706122525036335, 0.16675594449043274, 0.7198925614356995, 0.8872554302215576, 0.9136136174201965, 0.9327049255371094, 0.9480241537094116, 0.9596030116081238], "prob_old_token": [0.6083126068115234, 0.001997125567868352, 0.0035703531466424465, 0.0007657199748791754, 0.0029596344102174044, 0.003589457832276821, 0.004264192655682564, 0.004428839776664972, 0.004545304458588362, 0.005307450890541077, 0.006526283919811249, 0.008702819235622883, 0.013627048581838608, 0.015379390679299831, 0.003877777373418212, 0.001504880958236754, 0.0014528227038681507, 0.0013770379591733217, 0.0010588523000478745, 0.000728594430256635], "l1-model.layers.0.mlp.down_proj.weight": [98275.375], "l2-model.layers.0.mlp.down_proj.weight": [17.5345458984375], "linf-model.layers.0.mlp.down_proj.weight": [0.00958018098026514], "request": {"prompt": "{} is held by", "subject": "Minister of Foreign Affairs of Belarus", "target_new": {"str": "Idriss D\u00e9by"}, "old_answer": {"str": "Vladimir Makei"}, "seed": 42}}, {"loss_per_step": [5.808, 3.296, 2.71, 2.322, 1.82, 1.111, 0.5, 0.218, 0.242, 0.392, 0.006], "prob_new": [0.4812857508659363, 0.665675163269043, 0.7277663350105286, 0.7372276186943054, 0.7392240762710571, 0.7490139007568359, 0.7816811800003052, 0.8530656099319458, 0.8441510200500488, 0.8002549409866333, 0.9945141077041626], "prob_old": [0.8382276892662048, 0.3305926024913788, 0.31044530868530273, 0.3102532625198364, 0.3076285719871521, 0.31933802366256714, 0.34240230917930603, 0.38331371545791626, 0.32979506254196167, 0.326656699180603, 0.43795245885849], "prob_new_token": [9.099828446323954e-08, 2.749335180851631e-06, 2.148008206859231e-05, 9.740710811456665e-05, 0.0007201407570391893, 0.011930649168789387, 0.13670186698436737, 0.42318668961524963, 0.3816770613193512, 0.210069477558136, 0.9877400994300842], "prob_old_token": [0.6083126068115234, 0.0036202126648277044, 0.010566280223429203, 0.0118158720433712, 0.00850564707070589, 0.026049267500638962, 0.005774687509983778, 0.0006671391893178225, 0.0007408784586004913, 0.009196261875331402, 0.00010767618368845433], 
"l1-model.layers.0.mlp.down_proj.weight": [78855.21875], "l2-model.layers.0.mlp.down_proj.weight": [13.480401039123535], "linf-model.layers.0.mlp.down_proj.weight": [0.00503549724817276], "request": {"prompt": "{} is held by", "subject": "Minister of Foreign Affairs of Belarus", "target_new": {"str": "Gaston Browne"}, "old_answer": {"str": "Vladimir Makei"}, "seed": 42}}, {"loss_per_step": [5.622, 3.58, 2.562, 2.574, 3.587, 2.682, 1.846, 1.641, 1.326, 1.306, 0.757, 0.408, 0.314, 0.155, 0.04, 0.014, 0.007], "prob_new": [0.4701083302497864, 0.4826529920101166, 0.655744731426239, 0.7084846496582031, 0.5189456343650818, 0.6764742136001587, 0.7295486330986023, 0.7289221286773682, 0.7395780086517334, 0.7240128517150879, 0.7573796510696411, 0.7939656972885132, 0.8181303143501282, 0.8817181587219238, 0.9629294872283936, 0.9864888191223145, 0.9929348230361938], "prob_old": [0.9186565279960632, 0.5224855542182922, 0.5823289752006531, 0.6488273739814758, 0.7097417712211609, 0.7186228036880493, 0.7357040643692017, 0.7257390022277832, 0.7384838461875916, 0.6653992533683777, 0.7165897488594055, 0.763043224811554, 0.7691825032234192, 0.7398531436920166, 0.7216731309890747, 0.7119196057319641, 0.706115186214447], "prob_new_token": [4.263490609446308e-06, 2.893351120292209e-05, 5.609287472907454e-05, 4.039701889269054e-05, 4.897910912404768e-06, 3.05937064695172e-05, 0.000675604329444468, 0.001541883684694767, 0.005215826444327831, 0.006025020033121109, 0.04944945499300957, 0.20058517158031464, 0.2889877259731293, 0.553304135799408, 0.8666884899139404, 0.9555355310440063, 0.9788702726364136], "prob_old_token": [0.6722553372383118, 8.57215272844769e-05, 3.170671334373765e-05, 1.9810686353594065e-06, 6.797810101488722e-07, 6.103847408667207e-05, 0.016531532630324364, 0.001277506467886269, 0.031360987573862076, 5.25923642271664e-05, 0.0016248427564278245, 0.00046382180880755186, 0.0019087252439931035, 7.223083230201155e-05, 0.00010713504889281467, 0.0001245073217432946, 8.703043567948043e-05], "l1-model.layers.0.mlp.down_proj.weight": [87932.078125], "l2-model.layers.0.mlp.down_proj.weight": [15.81418228149414], "linf-model.layers.0.mlp.down_proj.weight": [0.007892265915870667], "request": {"prompt": "{} is held by", "subject": "list of heads of state of Chad", "target_new": {"str": "Gaston Browne"}, "old_answer": {"str": "Idriss D\u00e9by"}, "seed": 42}}, {"loss_per_step": [7.107, 6.434, 6.427, 4.889, 4.615, 3.396, 2.92, 2.595, 2.315, 1.964, 1.617, 1.282, 0.952, 0.711, 0.532, 0.389, 0.259, 0.177, 0.133, 0.108], "prob_new": [0.15828540921211243, 0.0581388995051384, 0.2479281723499298, 0.21611981093883514, 0.3001641035079956, 0.3980688750743866, 0.49840793013572693, 0.5907160043716431, 0.620148241519928, 0.6373066306114197, 0.6492974758148193, 0.6594802737236023, 0.6762310266494751, 0.7000786662101746, 0.730135440826416, 0.7673519849777222, 0.8175579905509949, 0.8606885671615601, 0.8887118697166443, 0.9065090417861938], "prob_old": [0.9186565279960632, 0.5345025658607483, 0.5168267488479614, 0.5132337808609009, 0.7158681750297546, 0.7092666625976562, 0.6721646189689636, 0.6503541469573975, 0.6344760060310364, 0.6226305365562439, 0.6141412854194641, 0.609832227230072, 0.6039884686470032, 0.5956208109855652, 0.5874592065811157, 0.5808807611465454, 0.5731909871101379, 0.5623127818107605, 0.5520747303962708, 0.5506061911582947], "prob_new_token": [4.95036510983482e-06, 5.818224963149987e-05, 1.5474274732696358e-06, 9.809939365368336e-05, 1.591415457369294e-05, 0.0001359720918117091, 0.0003009305219165981, 
0.0005312846042215824, 0.0011144070886075497, 0.0030309082940220833, 0.008325984701514244, 0.02232937514781952, 0.05922030285000801, 0.12110210955142975, 0.20564855635166168, 0.3153689503669739, 0.4656621217727661, 0.5961613059043884, 0.682402491569519, 0.7376009821891785], "prob_old_token": [0.6722553372383118, 0.0002650485257618129, 0.00012181278725620359, 0.00022514056763611734, 0.0025344672612845898, 0.00047960985102690756, 0.0006872010417282581, 0.0005675939028151333, 0.00038682552985846996, 0.00033415667712688446, 0.00034750907798297703, 0.00042910006595775485, 0.0006263944669626653, 0.0005864330451004207, 0.0003779979015234858, 0.00021763263794127852, 0.00010785830090753734, 5.141572546563111e-05, 3.312233093311079e-05, 2.8852860850747675e-05], "l1-model.layers.0.mlp.down_proj.weight": [102897.875], "l2-model.layers.0.mlp.down_proj.weight": [18.305395126342773], "linf-model.layers.0.mlp.down_proj.weight": [0.009660833515226841], "request": {"prompt": "{} is held by", "subject": "list of heads of state of Chad", "target_new": {"str": "Vladimir Makei"}, "old_answer": {"str": "Idriss D\u00e9by"}, "seed": 42}}, {"loss_per_step": [3.05, 1.155, 1.553, 0.094, 0.04, 0.059, 0.073, 0.022, 0.011, 0.008], "prob_new": [0.46842142939567566, 0.49536046385765076, 0.570301353931427, 0.912082314491272, 0.9609838724136353, 0.9442246556282043, 0.9351246953010559, 0.978397011756897, 0.9892682433128357, 0.9920260310173035], "prob_old": [0.8401201963424683, 0.6548154354095459, 0.6613544225692749, 0.6526497602462769, 0.6482138633728027, 0.6460258960723877, 0.6546280384063721, 0.6728150844573975, 0.6850175857543945, 0.6913900971412659], "prob_new_token": [4.251266091159778e-06, 0.0322655513882637, 0.22231921553611755, 0.8771332502365112, 0.9406088590621948, 0.9623270630836487, 0.975471019744873, 0.9893223643302917, 0.9916804432868958, 0.9922611117362976], "prob_old_token": [0.8187586665153503, 0.0002161036100005731, 2.8453445338527672e-05, 1.3509586551663233e-06, 3.8038839988985274e-07, 1.798987909751304e-07, 8.635203840867689e-08, 5.5922939168340235e-08, 5.90323807614368e-08, 6.226909476936271e-08], "l1-model.layers.0.mlp.down_proj.weight": [72280.421875], "l2-model.layers.0.mlp.down_proj.weight": [12.360393524169922], "linf-model.layers.0.mlp.down_proj.weight": [0.004503757227212191], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! Black Emperor", "target_new": {"str": "Warner Bros. 
Records"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [2.417, 1.458, 0.942, 0.426, 0.147, 0.044, 0.018, 0.012, 0.011, 0.01, 0.009], "prob_new": [0.5253622531890869, 0.633874773979187, 0.7649824023246765, 0.817030131816864, 0.8909510374069214, 0.958807110786438, 0.9821768999099731, 0.9878449440002441, 0.9893524050712585, 0.9897536039352417, 0.9912724494934082], "prob_old": [0.8401201963424683, 0.624455451965332, 0.6465858817100525, 0.6490594148635864, 0.6598491072654724, 0.6714547872543335, 0.6809972524642944, 0.686213493347168, 0.6881874799728394, 0.6887477040290833, 0.6892030239105225], "prob_new_token": [0.00048019958194345236, 0.0009143694769591093, 0.005559917539358139, 0.0948672667145729, 0.47110456228256226, 0.8407018184661865, 0.9575448632240295, 0.977938175201416, 0.9784876704216003, 0.9748503565788269, 0.9787959456443787], "prob_old_token": [0.8187586665153503, 7.928497507236898e-05, 0.0002903990098275244, 0.0009482903988100588, 0.0003174772718921304, 7.127903518266976e-05, 2.0423813111847267e-05, 1.2115927347622346e-05, 1.1894362614839338e-05, 1.2624491319002118e-05, 1.0604297131067142e-05], "l1-model.layers.0.mlp.down_proj.weight": [71378.1171875], "l2-model.layers.0.mlp.down_proj.weight": [12.725007057189941], "linf-model.layers.0.mlp.down_proj.weight": [0.004869077354669571], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! Black Emperor", "target_new": {"str": "Konvict Muzik"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [5.813, 2.718, 2.211, 1.268, 0.266, 0.101, 0.044, 0.019, 0.016, 0.018, 0.012, 0.006], "prob_new": [0.14490577578544617, 0.4165104329586029, 0.558591365814209, 0.6325128674507141, 0.8020662069320679, 0.9083044528961182, 0.9578582644462585, 0.9815523028373718, 0.9845155477523804, 0.9822463989257812, 0.9876946210861206, 0.9936746954917908], "prob_old": [0.8401201963424683, 0.6317710876464844, 0.6572629809379578, 0.6260135769844055, 0.5461465120315552, 0.5718880891799927, 0.5969790816307068, 0.6239402294158936, 0.6505194306373596, 0.6650221347808838, 0.6663400530815125, 0.6566590666770935], "prob_new_token": [6.53521738058771e-06, 0.0007372000836767256, 0.0018817830132320523, 0.025440776720643044, 0.4925785958766937, 0.7847452163696289, 0.9077188968658447, 0.9628322720527649, 0.9649624228477478, 0.9550349712371826, 0.9694539308547974, 0.9861711859703064], "prob_old_token": [0.8187586665153503, 0.00010087939881486818, 8.19410415715538e-05, 0.00013038353063166142, 2.440960452076979e-05, 7.015084065642441e-06, 3.3663652629911667e-06, 2.0999962089263136e-06, 2.028613380389288e-06, 1.985049721042742e-06, 1.1941079947064281e-06, 5.296447511682345e-07], "l1-model.layers.0.mlp.down_proj.weight": [81278.359375], "l2-model.layers.0.mlp.down_proj.weight": [14.04214859008789], "linf-model.layers.0.mlp.down_proj.weight": [0.005205050110816956], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! 
Black Emperor", "target_new": {"str": "Armada Music"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [6.934, 3.194, 2.298, 0.573, 0.073, 0.059, 0.027, 0.019, 0.015, 0.012, 0.009], "prob_new": [0.2340732216835022, 0.40596669912338257, 0.6060518026351929, 0.6896090507507324, 0.9303449988365173, 0.9429928064346313, 0.9736787676811218, 0.9813176989555359, 0.9851846694946289, 0.9882738590240479, 0.9913327693939209], "prob_old": [0.9576637148857117, 0.7851917147636414, 0.7323383688926697, 0.7092816233634949, 0.6509882211685181, 0.6306624412536621, 0.6440578103065491, 0.6546757221221924, 0.6577332615852356, 0.6541181802749634, 0.6459171175956726], "prob_new_token": [1.0154884222401961e-07, 0.0002358514757361263, 0.0012287218123674393, 0.2084762305021286, 0.9029051065444946, 0.9207449555397034, 0.9862650036811829, 0.9937061667442322, 0.9958838820457458, 0.9968466758728027, 0.9973264336585999], "prob_old_token": [0.8164881467819214, 0.0007186211296357214, 4.161277502134908e-06, 4.726857696368825e-06, 3.708230167376314e-07, 1.4741840459464584e-07, 2.0488590379841298e-08, 7.3777419729026406e-09, 4.377549878142872e-09, 3.3499500950995298e-09, 3.01173797012666e-09], "l1-model.layers.0.mlp.down_proj.weight": [77472.234375], "l2-model.layers.0.mlp.down_proj.weight": [13.578680992126465], "linf-model.layers.0.mlp.down_proj.weight": [0.004952054005116224], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "Peaceville Records"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [4.19, 2.541, 2.239, 3.381, 2.179, 1.076, 0.663, 0.226, 0.105, 0.094, 0.062, 0.034, 0.019, 0.011, 0.008], "prob_new": [0.34920310974121094, 0.44570013880729675, 0.5526152849197388, 0.4045804738998413, 0.47802552580833435, 0.5655083656311035, 0.7035624384880066, 0.8176913261413574, 0.9066223502159119, 0.9148775339126587, 0.9421091079711914, 0.9667332172393799, 0.9814951419830322, 0.9887840747833252, 0.9921131134033203], "prob_old": [0.9576637148857117, 0.7923811078071594, 0.8156399726867676, 0.7330031394958496, 0.7585458755493164, 0.7360159158706665, 0.7474552392959595, 0.752972424030304, 0.7828994989395142, 0.7852705121040344, 0.7850345969200134, 0.7838259935379028, 0.7820461392402649, 0.7807680368423462, 0.7801095843315125], "prob_new_token": [4.1410003177588806e-06, 0.0002837601350620389, 0.0003906933416146785, 0.0001600459945620969, 0.0024142670445144176, 0.04612768068909645, 0.09612245112657547, 0.80790114402771, 0.7436277866363525, 0.7758912444114685, 0.8655532002449036, 0.9320117235183716, 0.9626950621604919, 0.9756448268890381, 0.9813767075538635], "prob_old_token": [0.8164881467819214, 0.0015450246864929795, 0.11937468498945236, 2.204612064815592e-05, 0.0003760673862416297, 2.8300570193096064e-05, 7.747208655928262e-06, 7.98188011685852e-06, 2.4174740246962756e-05, 2.505635166016873e-05, 1.4527443454426248e-05, 6.567255695699714e-06, 2.9717054985667346e-06, 1.609823584658443e-06, 1.0525501465963316e-06], "l1-model.layers.0.mlp.down_proj.weight": [80294.0078125], "l2-model.layers.0.mlp.down_proj.weight": [14.45573902130127], "linf-model.layers.0.mlp.down_proj.weight": [0.0066656325943768024], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "XL Recordings"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [8.684, 4.474, 3.13, 1.383, 3.077, 0.543, 0.515, 0.065, 0.023, 0.015, 0.013, 0.01, 0.008], 
"prob_new": [0.005541480612009764, 0.16783420741558075, 0.4904176890850067, 0.5283483862876892, 0.47902336716651917, 0.6671987771987915, 0.6771167516708374, 0.9387627840042114, 0.9778294563293457, 0.9851477146148682, 0.9875175952911377, 0.9897720813751221, 0.9916858673095703], "prob_old": [0.9576637148857117, 0.7888135313987732, 0.7862973809242249, 0.7673757672309875, 0.6256861686706543, 0.6173617243766785, 0.6756269931793213, 0.6206929087638855, 0.6073830127716064, 0.5991072654724121, 0.5934341549873352, 0.5902463793754578, 0.588560163974762], "prob_new_token": [2.58570617006626e-06, 0.00038734881673008204, 0.001951631740666926, 0.06328737735748291, 0.0022250742185860872, 0.33955612778663635, 0.3589525818824768, 0.8825591206550598, 0.960218071937561, 0.9749138355255127, 0.9800992608070374, 0.984464168548584, 0.9878626465797424], "prob_old_token": [0.8164881467819214, 0.0019633823540061712, 0.0003392109938431531, 0.00013970393047202379, 2.1040195861132815e-06, 1.2475804396672174e-05, 3.944986019632779e-05, 9.028591193782631e-06, 2.5744714093889343e-06, 1.7662302980170352e-06, 1.5369162156275706e-06, 1.275183990401274e-06, 1.0312818403690471e-06], "l1-model.layers.0.mlp.down_proj.weight": [77952.703125], "l2-model.layers.0.mlp.down_proj.weight": [13.922402381896973], "linf-model.layers.0.mlp.down_proj.weight": [0.005528528243303299], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "Domino"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [3.415, 2.523, 1.642, 1.444, 1.196, 1.14, 0.798, 1.097, 0.901, 0.582, 0.457, 0.385, 0.246, 0.147, 0.078, 0.041, 0.022, 0.014, 0.01], "prob_new": [0.46457263827323914, 0.46849092841148376, 0.654670238494873, 0.7201595306396484, 0.7528688311576843, 0.7622553110122681, 0.7460750937461853, 0.753750205039978, 0.7827998995780945, 0.758417546749115, 0.7722575068473816, 0.79353266954422, 0.8353723883628845, 0.879633903503418, 0.9279157519340515, 0.9607501029968262, 0.9779882431030273, 0.9862608313560486, 0.9903851747512817], "prob_old": [0.9080218076705933, 0.6812659502029419, 0.5676450133323669, 0.581477165222168, 0.3232572078704834, 0.10480209439992905, 0.023299459367990494, 0.006164472550153732, 0.007108400575816631, 0.004741514567285776, 0.006311415694653988, 0.007418592926114798, 0.0059527563862502575, 0.0041955867782235146, 0.002881627297028899, 0.0021183944772928953, 0.0017722614575177431, 0.0016399011947214603, 0.0015766751021146774], "prob_new_token": [1.5300216546165757e-05, 0.0002674872812349349, 0.0007940858486108482, 0.0011672601103782654, 0.003262373385950923, 0.004097984172403812, 0.025549832731485367, 0.005389023572206497, 0.012259776704013348, 0.07458756119012833, 0.13699805736541748, 0.18358956277370453, 0.3515067398548126, 0.5792457461357117, 0.8176553845405579, 0.9290806651115417, 0.9668541550636292, 0.9827188849449158, 0.9902185797691345], "prob_old_token": [0.7662683725357056, 0.14403849840164185, 0.07152891159057617, 0.08912826329469681, 0.008341148495674133, 0.0008101069834083319, 0.0001845091173890978, 0.00027491277433000505, 0.00014373463636729866, 0.0003903804754372686, 8.797012560535222e-05, 5.0531685701571405e-05, 1.0889199984376319e-05, 2.3384227461065166e-06, 8.902294439394609e-07, 4.1696728203532984e-07, 2.2545650324445887e-07, 1.23098004678468e-07, 6.712198796776647e-08], "l1-model.layers.0.mlp.down_proj.weight": [93454.96875], "l2-model.layers.0.mlp.down_proj.weight": [16.939594268798828], "linf-model.layers.0.mlp.down_proj.weight": 
[0.008721753023564816], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Michael O'Neill"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [2.651, 2.578, 2.515, 1.133, 0.627, 0.211, 0.057, 0.017, 0.008], "prob_new": [0.7096829414367676, 0.7330107688903809, 0.7449021339416504, 0.7504695057868958, 0.7688212394714355, 0.8566237688064575, 0.9488706588745117, 0.9837385416030884, 0.992469310760498], "prob_old": [0.9080218076705933, 0.5717870593070984, 0.5621087551116943, 0.6235269904136658, 0.6282088756561279, 0.6256281137466431, 0.6293520331382751, 0.6351712942123413, 0.6363860368728638], "prob_new_token": [2.9521990654757246e-05, 3.562600977602415e-05, 4.363496191217564e-05, 0.010851811617612839, 0.08205612003803253, 0.431641161441803, 0.7999346256256104, 0.9393259882926941, 0.9740210175514221], "prob_old_token": [0.7662683725357056, 0.007162041962146759, 0.0041360692121088505, 0.008952424861490726, 0.0165565088391304, 0.0026834988966584206, 0.0006160303601063788, 0.0001031925348797813, 2.8767302865162492e-05], "l1-model.layers.0.mlp.down_proj.weight": [63061.62109375], "l2-model.layers.0.mlp.down_proj.weight": [11.213138580322266], "linf-model.layers.0.mlp.down_proj.weight": [0.00398625060915947], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Pia Sundhage"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [5.4, 3.732, 2.345, 1.084, 1.302, 4.889, 2.944, 10.274, 10.088, 6.596, 4.873, 2.625, 1.88, 1.222, 0.662, 0.328, 0.159, 0.122, 0.114, 0.101], "prob_new": [0.25741130113601685, 0.6271566152572632, 0.6550707221031189, 0.6693428754806519, 0.6536381244659424, 0.07723448425531387, 0.5791327953338623, 4.4165903091197833e-05, 5.889849126106128e-05, 0.22023648023605347, 0.3167925775051117, 0.5334522724151611, 0.5969902276992798, 0.655559241771698, 0.6923508048057556, 0.7784752249717712, 0.8678147196769714, 0.8935117721557617, 0.8998783826828003, 0.9099528789520264], "prob_old": [0.9080218076705933, 0.6407496929168701, 0.6416317224502563, 0.31762421131134033, 0.5506505370140076, 0.0009904142934828997, 0.00011213283869437873, 9.569946269039065e-05, 0.00010289717465639114, 9.794073412194848e-05, 0.00017214621766470373, 0.0008850886370055377, 0.004795154556632042, 0.007351301610469818, 0.004132967442274094, 0.002527837408706546, 0.0015595352742820978, 0.001625941484235227, 0.0019363050814718008, 0.0020060972310602665], "prob_new_token": [2.869437594199553e-06, 1.5553674529655837e-05, 0.000911919807549566, 0.039907388389110565, 0.02137749455869198, 7.431332051055506e-05, 0.00019339709251653403, 1.172526663140161e-05, 1.0546064913796727e-05, 1.2005406460957602e-05, 0.00028058505267836154, 0.0006229410646483302, 0.004487520083785057, 0.027233611792325974, 0.147722065448761, 0.3979648947715759, 0.6541223526000977, 0.7249831557273865, 0.7409446239471436, 0.7685871720314026], "prob_old_token": [0.7662683725357056, 0.05040224641561508, 0.045348700135946274, 6.420759746106341e-05, 0.0033776869531720877, 4.377133518573828e-05, 8.713166607776657e-05, 8.016636456886772e-06, 1.566031642141752e-05, 9.505802154308185e-06, 5.709359174943529e-05, 1.871943641162943e-05, 7.352506509050727e-05, 0.00013886496890336275, 5.7370310969417915e-05, 1.6823771147755906e-05, 5.4753240874561016e-06, 2.924213504229556e-06, 2.2498472844745265e-06, 
1.786250891200325e-06], "l1-model.layers.0.mlp.down_proj.weight": [93228.515625], "l2-model.layers.0.mlp.down_proj.weight": [16.932950973510742], "linf-model.layers.0.mlp.down_proj.weight": [0.009612079709768295], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Eddie Jones"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [7.073, 4.345, 4.558, 4.344, 3.918, 3.442, 3.012, 2.566, 1.637, 4.624, 1.597, 1.434, 1.309, 1.152, 1.003, 0.872, 0.674, 0.403, 0.162, 0.047], "prob_new": [0.2787639796733856, 0.2040102332830429, 0.06854863464832306, 0.08640311658382416, 0.16691192984580994, 0.3312738537788391, 0.3431687355041504, 0.3476142883300781, 0.40749868750572205, 0.2607906460762024, 0.5101452469825745, 0.5149557590484619, 0.5491511225700378, 0.6049661636352539, 0.6522676944732666, 0.6767467260360718, 0.7041893005371094, 0.7632441520690918, 0.8706637620925903, 0.9558420181274414], "prob_old": [0.8151693344116211, 0.3882501721382141, 0.007720819674432278, 0.007222320884466171, 0.007070590741932392, 0.006601226981729269, 0.0073227472603321075, 0.008809818886220455, 0.065567746758461, 0.004400154110044241, 0.0024361349642276764, 0.0028173858299851418, 0.0040197172202169895, 0.005112713668495417, 0.005879505537450314, 0.007342159282416105, 0.011105566285550594, 0.01768190972507, 0.011449865996837616, 0.008927697315812111], "prob_new_token": [2.648082272571628e-06, 0.0006931921816430986, 0.008477703668177128, 0.012733940035104752, 0.019940325990319252, 0.026347462087869644, 0.032235756516456604, 0.036807507276535034, 0.03922843188047409, 0.014449859969317913, 0.01603720337152481, 0.025841424241662025, 0.03176209330558777, 0.04049317166209221, 0.054595962166786194, 0.0766245573759079, 0.1355762779712677, 0.3028346598148346, 0.6209905743598938, 0.8772788643836975], "prob_old_token": [0.6482585668563843, 0.08696765452623367, 0.014813859947025776, 0.013683534227311611, 0.013294448144733906, 0.011231428012251854, 0.008913220837712288, 0.007364619057625532, 0.014744846150279045, 0.008270315825939178, 0.004799406975507736, 0.005547161214053631, 0.007869372144341469, 0.00996936671435833, 0.011565214022994041, 0.01452110055834055, 0.02196737751364708, 0.03470929339528084, 0.021900784224271774, 0.010009144432842731], "l1-model.layers.0.mlp.down_proj.weight": [102287.1171875], "l2-model.layers.0.mlp.down_proj.weight": [18.458765029907227], "linf-model.layers.0.mlp.down_proj.weight": [0.009947715327143669], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Bob Melvin"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [4.296, 3.174, 1.944, 0.884, 0.524, 0.103, 0.053, 0.027, 0.018, 0.012, 0.008], "prob_new": [0.28204405307769775, 0.1603764444589615, 0.4956931173801422, 0.5812303423881531, 0.6620103716850281, 0.9056975841522217, 0.9493746757507324, 0.9737018346786499, 0.9826287031173706, 0.9882063865661621, 0.9919995069503784], "prob_old": [0.8151693344116211, 0.2403419464826584, 0.1591145545244217, 0.11859998852014542, 0.006871434394270182, 0.007688435725867748, 0.024141430854797363, 0.13410590589046478, 0.3519481420516968, 0.40093258023262024, 0.3824290931224823], "prob_new_token": [0.000329130474710837, 0.005551354959607124, 0.02110903523862362, 0.17222146689891815, 0.36550813913345337, 0.8302432298660278, 0.91116863489151, 0.9534531235694885, 0.9687360525131226, 
0.9792687892913818, 0.986585259437561], "prob_old_token": [0.6482585668563843, 0.03004528023302555, 0.012107792310416698, 0.014094756916165352, 0.002871100790798664, 0.00017222760652657598, 0.00011027476284652948, 7.24794517736882e-05, 6.029318683431484e-05, 2.9623493901453912e-05, 1.0863082025025506e-05], "l1-model.layers.0.mlp.down_proj.weight": [74209.9140625], "l2-model.layers.0.mlp.down_proj.weight": [13.110461235046387], "linf-model.layers.0.mlp.down_proj.weight": [0.004794320557266474], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Karl Robinson"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [3.51, 1.935, 1.627, 0.646, 0.206, 0.077, 0.027, 0.018, 0.012, 0.009], "prob_new": [0.5479843020439148, 0.7407159805297852, 0.742374837398529, 0.7521049380302429, 0.8360134363174438, 0.9323263168334961, 0.9741752743721008, 0.9821082949638367, 0.9881024360656738, 0.9909517168998718], "prob_old": [0.8151693344116211, 0.5092870593070984, 0.5617589354515076, 0.5450061559677124, 0.6542386412620544, 0.5127164721488953, 0.5017445683479309, 0.49998101592063904, 0.49965915083885193, 0.4994119703769684], "prob_new_token": [3.284277681814274e-06, 8.603495371062309e-05, 0.00039849255699664354, 0.05430779233574867, 0.5344794988632202, 0.729779064655304, 0.9303582310676575, 0.960452139377594, 0.9797514081001282, 0.9906274080276489], "prob_old_token": [0.6482585668563843, 0.054943330585956573, 0.13832731544971466, 0.09249226748943329, 0.3092147409915924, 0.02727675810456276, 0.004170469008386135, 0.0005947292083874345, 0.00016131109441630542, 6.464395119110122e-05], "l1-model.layers.0.mlp.down_proj.weight": [73069.109375], "l2-model.layers.0.mlp.down_proj.weight": [12.803092956542969], "linf-model.layers.0.mlp.down_proj.weight": [0.004473651759326458], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Florent Ibenge"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [3.54, 2.275, 1.412, 0.968, 0.627, 0.204, 0.073, 0.035, 0.024, 0.014, 0.006], "prob_new": [0.5062932968139648, 0.6492473483085632, 0.7370695471763611, 0.7832708358764648, 0.7856270670890808, 0.8593356013298035, 0.9349619150161743, 0.966680645942688, 0.9771380424499512, 0.9859690070152283, 0.993664562702179], "prob_old": [0.8161789774894714, 0.4243389070034027, 0.38962262868881226, 0.36634835600852966, 0.3837527930736542, 0.39587482810020447, 0.4527716338634491, 0.5052822828292847, 0.5309643745422363, 0.55384761095047, 0.5617768168449402], "prob_new_token": [7.655329682165757e-06, 3.110163743258454e-05, 0.0012191110290586948, 0.008670725859701633, 0.04919954761862755, 0.4028468728065491, 0.7410423755645752, 0.8710401654243469, 0.9101942181587219, 0.9488377571105957, 0.9807266592979431], "prob_old_token": [0.7256129384040833, 0.05131423473358154, 0.0021331317257136106, 0.0010296246036887169, 0.0008692849660292268, 0.0017920045647770166, 0.0029077508952468634, 0.0038285048212856054, 0.0016494202427566051, 0.001257260562852025, 0.0016918490873649716], "l1-model.layers.0.mlp.down_proj.weight": [75609.9296875], "l2-model.layers.0.mlp.down_proj.weight": [13.248079299926758], "linf-model.layers.0.mlp.down_proj.weight": [0.004962466657161713], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "Sultan of Brunei"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, 
{"loss_per_step": [5.507, 3.57, 1.937, 0.97, 0.645, 0.346, 0.217, 0.12, 0.058, 0.041, 0.031, 0.024, 0.019, 0.015, 0.012, 0.01], "prob_new": [0.0979776456952095, 0.2002718299627304, 0.36179468035697937, 0.5283292531967163, 0.6308432817459106, 0.7396087050437927, 0.8209468722343445, 0.8918755650520325, 0.944246232509613, 0.9604328274726868, 0.9693393111228943, 0.9759611487388611, 0.9812798500061035, 0.9853094220161438, 0.9882335066795349, 0.9903263449668884], "prob_old": [0.8161789774894714, 0.5572242736816406, 0.5479926466941833, 0.521401584148407, 0.5163226127624512, 0.4959413707256317, 0.4481726586818695, 0.42879435420036316, 0.4173000454902649, 0.3813173770904541, 0.3197115659713745, 0.23704738914966583, 0.2042589634656906, 0.19314570724964142, 0.18600070476531982, 0.18054723739624023], "prob_new_token": [0.00014183954044710845, 0.0021340360399335623, 0.0340229757130146, 0.06421516835689545, 0.1319383978843689, 0.4052855670452118, 0.5593324899673462, 0.7280620336532593, 0.897001326084137, 0.9324557781219482, 0.9513222575187683, 0.9668524861335754, 0.9788275361061096, 0.9865467548370361, 0.9911477565765381, 0.9938411116600037], "prob_old_token": [0.7256129384040833, 0.06470122188329697, 0.01970314234495163, 0.006441803649067879, 0.0022377222776412964, 0.0007756357663311064, 0.0004609944298863411, 0.0002523068687878549, 7.878411997808143e-05, 5.335321475286037e-05, 4.834807259612717e-05, 4.064968743477948e-05, 2.9688342692679726e-05, 2.0109606339246966e-05, 1.3351730558497366e-05, 8.984227861219551e-06], "l1-model.layers.0.mlp.down_proj.weight": [89862.421875], "l2-model.layers.0.mlp.down_proj.weight": [15.817277908325195], "linf-model.layers.0.mlp.down_proj.weight": [0.0070357732474803925], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "Grand Prince of Kiev"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [3.907, 1.223, 1.042, 0.496, 0.274, 0.207, 0.123, 0.071, 0.043, 0.027, 0.018, 0.012, 0.008], "prob_new": [0.2329782247543335, 0.38350486755371094, 0.5510447025299072, 0.6722137928009033, 0.7989017963409424, 0.839394211769104, 0.8947449922561646, 0.9351224899291992, 0.959362268447876, 0.9736254215240479, 0.982330858707428, 0.9880154728889465, 0.991590142250061], "prob_old": [0.8161789774894714, 0.5537616610527039, 0.6020145416259766, 0.5677499175071716, 0.5676429271697998, 0.5635542273521423, 0.5736544728279114, 0.5768877863883972, 0.5764392614364624, 0.5755535364151001, 0.576615035533905, 0.578997015953064, 0.5806415677070618], "prob_new_token": [0.00019359435827936977, 0.37890398502349854, 0.04933912307024002, 0.29620635509490967, 0.42673924565315247, 0.5131654739379883, 0.6725460290908813, 0.7982808351516724, 0.879482090473175, 0.9300256371498108, 0.9601637125015259, 0.977784276008606, 0.9873946905136108], "prob_old_token": [0.7256129384040833, 0.008856750093400478, 0.2767254710197449, 0.0019230691250413656, 0.0012432821094989777, 0.00101917190477252, 0.000497755769174546, 0.0002376464253757149, 0.00013491073332261294, 9.357379894936457e-05, 7.362455653492361e-05, 5.897231676499359e-05, 4.9897367716766894e-05], "l1-model.layers.0.mlp.down_proj.weight": [78988.828125], "l2-model.layers.0.mlp.down_proj.weight": [13.995367050170898], "linf-model.layers.0.mlp.down_proj.weight": [0.005849863402545452], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "King of the French"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [3.533, 
1.983, 1.499, 0.743, 0.986, 0.38, 0.145, 0.077, 0.055, 0.037, 0.022, 0.015, 0.012, 0.01], "prob_new": [0.28912192583084106, 0.36962801218032837, 0.4021124839782715, 0.6008093953132629, 0.6387355923652649, 0.7033342719078064, 0.8735015988349915, 0.9282926917076111, 0.9482943415641785, 0.9638392329216003, 0.9779044389724731, 0.9849757552146912, 0.9882513284683228, 0.9902666211128235], "prob_old": [0.8448086977005005, 0.5933566689491272, 0.49234315752983093, 0.6134030222892761, 0.5634140968322754, 0.6534678936004639, 0.6226387023925781, 0.6539202928543091, 0.6892306804656982, 0.7078003883361816, 0.7153571844100952, 0.7174516916275024, 0.7167543172836304, 0.7154668569564819], "prob_new_token": [0.008998566307127476, 0.29713761806488037, 0.36459532380104065, 0.10118749737739563, 0.01982201635837555, 0.52482008934021, 0.6714396476745605, 0.8108582496643066, 0.8483567237854004, 0.9118174314498901, 0.9601011276245117, 0.9760255813598633, 0.9825154542922974, 0.9861536622047424], "prob_old_token": [0.6732748746871948, 0.0038943099789321423, 0.00040844432078301907, 0.006144468206912279, 0.025773808360099792, 0.05040908604860306, 0.0025784128811210394, 0.00013863842468708754, 2.4024797312449664e-05, 1.1223861292819493e-05, 6.919642601133091e-06, 5.102730483486084e-06, 3.986548563261749e-06, 3.328745833641733e-06], "l1-model.layers.0.mlp.down_proj.weight": [80713.9609375], "l2-model.layers.0.mlp.down_proj.weight": [14.465856552124023], "linf-model.layers.0.mlp.down_proj.weight": [0.006194980815052986], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "Prime Minister of the Netherlands"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [3.966, 2.742, 1.543, 0.938, 0.628, 0.21, 0.109, 0.06, 0.032, 0.025, 0.023, 0.02, 0.018, 0.039, 0.011, 0.01, 0.006], "prob_new": [0.2848266363143921, 0.19271579384803772, 0.6291502714157104, 0.6771639585494995, 0.6960245370864868, 0.8261677026748657, 0.8991225957870483, 0.9423441290855408, 0.9683739542961121, 0.9750696420669556, 0.9773651957511902, 0.9805103540420532, 0.9822093844413757, 0.9631909728050232, 0.9894482493400574, 0.989959716796875, 0.9939874410629272], "prob_old": [0.8448086977005005, 0.6264787912368774, 0.6979863047599792, 0.6606719493865967, 0.519595742225647, 0.6874613165855408, 0.6784839630126953, 0.6474432945251465, 0.6146745085716248, 0.5727530121803284, 0.5357584357261658, 0.5160901546478271, 0.505842924118042, 0.49850982427597046, 0.509238600730896, 0.5235245227813721, 0.509902834892273], "prob_new_token": [0.006945076864212751, 0.18211257457733154, 0.8313512802124023, 0.8004660606384277, 0.8095289468765259, 0.8252278566360474, 0.9216763377189636, 0.9535654187202454, 0.9573866128921509, 0.9570912718772888, 0.9607173204421997, 0.9674668312072754, 0.9748464822769165, 0.9812522530555725, 0.9889276027679443, 0.9937952756881714, 0.9962536096572876], "prob_old_token": [0.6732748746871948, 0.0082180080935359, 0.001286220969632268, 0.0002221836184617132, 9.743851842358708e-05, 0.0009366903104819357, 0.0008401299710385501, 0.00035108302836306393, 0.00013206974836066365, 6.090072565712035e-05, 3.328652019263245e-05, 1.9271650671726093e-05, 1.1456458196335007e-05, 7.317777544812998e-06, 3.970510533690685e-06, 2.22039579966804e-06, 1.22042661132582e-06], "l1-model.layers.0.mlp.down_proj.weight": [93720.296875], "l2-model.layers.0.mlp.down_proj.weight": [16.463130950927734], "linf-model.layers.0.mlp.down_proj.weight": [0.007158379070460796], "request": {"prompt": "{} has the job 
title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "President of the Republic of Congo"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [4.176, 2.237, 1.545, 0.927, 0.449, 0.337, 0.117, 0.061, 0.046, 0.032, 0.02, 0.013, 0.009], "prob_new": [0.3508017659187317, 0.471401184797287, 0.39187952876091003, 0.5056158304214478, 0.6793891191482544, 0.7271339297294617, 0.893741250038147, 0.9425538778305054, 0.9563111066818237, 0.9693572521209717, 0.9800890684127808, 0.9872950911521912, 0.9910222887992859], "prob_old": [0.8448086977005005, 0.5207439064979553, 0.40476149320602417, 0.3320176303386688, 0.5234936475753784, 0.4658782482147217, 0.5171364545822144, 0.5470510721206665, 0.5685555934906006, 0.6136716604232788, 0.6525969505310059, 0.6740975975990295, 0.6832263469696045], "prob_new_token": [0.008998566307127476, 0.38024163246154785, 0.32879677414894104, 0.16317008435726166, 0.4109587073326111, 0.6600196361541748, 0.773897111415863, 0.8556938767433167, 0.8836847543716431, 0.9219598770141602, 0.956288754940033, 0.9783399701118469, 0.9874424338340759], "prob_old_token": [0.6732748746871948, 0.017679864540696144, 7.151551108108833e-05, 0.013596335425972939, 8.895543942344375e-06, 2.858721200027503e-05, 2.7433385184849612e-05, 8.861134119797498e-06, 2.1837443000549683e-06, 9.501954423285497e-07, 6.682527668999683e-07, 5.744778377447801e-07, 5.023301810069825e-07], "l1-model.layers.0.mlp.down_proj.weight": [77228.359375], "l2-model.layers.0.mlp.down_proj.weight": [13.847223281860352], "linf-model.layers.0.mlp.down_proj.weight": [0.005855914670974016], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "Prime Minister of Italy"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [4.673, 2.244, 1.182, 0.758, 0.074, 0.003], "prob_new": [0.49746453762054443, 0.513555109500885, 0.618262529373169, 0.7337275743484497, 0.9347756505012512, 0.9972813129425049], "prob_old": [0.8818895220756531, 0.5192351341247559, 0.5237314105033875, 0.5476353168487549, 0.513805091381073, 0.5266474485397339], "prob_new_token": [0.00019636286015156657, 0.0021341510582715273, 0.01935533434152603, 0.05475115403532982, 0.7588958144187927, 0.9928106069564819], "prob_old_token": [0.7280361652374268, 0.0010621149558573961, 0.00028238282538950443, 0.0002241046167910099, 0.0002133935340680182, 1.6953360670868278e-07], "l1-model.layers.0.mlp.down_proj.weight": [50978.9765625], "l2-model.layers.0.mlp.down_proj.weight": [8.75796890258789], "linf-model.layers.0.mlp.down_proj.weight": [0.00251108780503273], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Ben Affleck"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.955, 4.347, 3.306, 1.979, 1.025, 0.223, 0.046, 0.014, 0.008], "prob_new": [0.3345504701137543, 0.2868725657463074, 0.3086893558502197, 0.36900901794433594, 0.5622904300689697, 0.8361584544181824, 0.9565305709838867, 0.9864336848258972, 0.9925658106803894], "prob_old": [0.8818895220756531, 0.5010626316070557, 0.4919544756412506, 0.6055530905723572, 0.7803470492362976, 0.790469229221344, 0.782763659954071, 0.775613009929657, 0.763020932674408], "prob_new_token": [0.004192287568002939, 0.00033389803138561547, 0.0026389616541564465, 0.03241585195064545, 0.07511048018932343, 0.5158145427703857, 0.8723713755607605, 0.9617664813995361, 0.9802062511444092], "prob_old_token": [0.7280361652374268, 0.0003195821482222527, 
0.0007222708081826568, 0.0070765637792646885, 0.05369957163929939, 0.062487367540597916, 0.010027493350207806, 0.0015043169260025024, 0.000371225003618747], "l1-model.layers.0.mlp.down_proj.weight": [62172.0234375], "l2-model.layers.0.mlp.down_proj.weight": [11.134754180908203], "linf-model.layers.0.mlp.down_proj.weight": [0.003948371857404709], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Jamie Bell"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.163, 3.066, 1.6, 0.577, 0.204, 0.04, 0.013, 0.011, 0.012, 0.011, 0.009], "prob_new": [0.4374825954437256, 0.40695810317993164, 0.5360700488090515, 0.751334011554718, 0.8559324145317078, 0.9619316458702087, 0.986720085144043, 0.9889785647392273, 0.988224446773529, 0.9890926480293274, 0.9913078546524048], "prob_old": [0.8818895220756531, 0.50083327293396, 0.5280251502990723, 0.5156132578849792, 0.5152469873428345, 0.517587423324585, 0.5215603709220886, 0.5276051163673401, 0.5345430374145508, 0.5423014163970947, 0.5503695011138916], "prob_new_token": [7.356026617344469e-05, 0.000516169355250895, 0.008479228243231773, 0.07980404794216156, 0.41459470987319946, 0.8658421039581299, 0.9621572494506836, 0.969450056552887, 0.9639809131622314, 0.9662593603134155, 0.9763664603233337], "prob_old_token": [0.7280361652374268, 0.00022392196115106344, 0.016614485532045364, 0.006172615569084883, 0.001400967943482101, 0.00015777010412421077, 3.119964458164759e-05, 2.190758277720306e-05, 2.5957806428777985e-05, 2.367207343922928e-05, 1.4908333469065838e-05], "l1-model.layers.0.mlp.down_proj.weight": [77342.625], "l2-model.layers.0.mlp.down_proj.weight": [13.496735572814941], "linf-model.layers.0.mlp.down_proj.weight": [0.004936547018587589], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Anna Kournikova"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.395, 1.966, 1.436, 0.67, 0.341, 0.034, 0.051, 0.038, 0.019, 0.011, 0.009], "prob_new": [0.5038432478904724, 0.7683398127555847, 0.7465950846672058, 0.7824558615684509, 0.8316140174865723, 0.9675604104995728, 0.953682541847229, 0.9642342925071716, 0.9817726016044617, 0.9888069033622742, 0.9913816452026367], "prob_old": [0.97446209192276, 0.4163629114627838, 0.4246330261230469, 0.3429962396621704, 0.4592227041721344, 0.4415263533592224, 0.4005928635597229, 0.37932875752449036, 0.3954935073852539, 0.4065156877040863, 0.4051944315433502], "prob_new_token": [2.238563865830656e-06, 6.377899990184233e-05, 0.0010157102951779962, 0.04012927785515785, 0.18760673701763153, 0.871989905834198, 0.8097081184387207, 0.8484569787979126, 0.927036464214325, 0.9593166708946228, 0.971156120300293], "prob_old_token": [0.9460753798484802, 0.0010695389937609434, 0.0004080584621988237, 6.097494042478502e-05, 0.00010570276208454743, 1.8677592379390262e-06, 2.2344452190736774e-06, 1.8864914181904169e-06, 9.327425800620404e-07, 5.747600084760052e-07, 4.7651505497015023e-07], "l1-model.layers.0.mlp.down_proj.weight": [72482.34375], "l2-model.layers.0.mlp.down_proj.weight": [12.708101272583008], "linf-model.layers.0.mlp.down_proj.weight": [0.004866596311330795], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Enrique Iglesias"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [5.18, 2.449, 2.654, 1.604, 1.075, 0.251, 0.055, 0.018, 0.01], "prob_new": [0.49945124983787537, 0.5097695589065552, 
0.5170026421546936, 0.6203064918518066, 0.7097821235656738, 0.833611249923706, 0.9493558406829834, 0.9823689460754395, 0.9903119802474976], "prob_old": [0.97446209192276, 0.4025137424468994, 0.03952304273843765, 0.050367068499326706, 0.05359409749507904, 0.0492679625749588, 0.04395132511854172, 0.04472438246011734, 0.048031240701675415], "prob_new_token": [2.7291832793707727e-06, 0.001378366257995367, 0.0003287305007688701, 0.003404853632673621, 0.016506940126419067, 0.3877142369747162, 0.8217059373855591, 0.94291752576828, 0.9713613986968994], "prob_old_token": [0.9460753798484802, 0.002475737826898694, 1.3332059097592719e-05, 8.046512812143192e-05, 9.008418419398367e-05, 1.8706265109358355e-05, 3.1620215850125533e-06, 6.639205594183295e-07, 2.805311112297204e-07], "l1-model.layers.0.mlp.down_proj.weight": [66375.234375], "l2-model.layers.0.mlp.down_proj.weight": [11.716567039489746], "linf-model.layers.0.mlp.down_proj.weight": [0.003946186508983374], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Ben Affleck"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [6.44, 3.858, 3.118, 1.099, 0.49, 0.129, 0.052, 0.043, 0.033, 0.022, 0.015, 0.013, 0.012, 0.012, 0.011, 0.01], "prob_new": [0.010797940194606781, 0.1599172204732895, 0.4788288474082947, 0.5985692739486694, 0.6811487674713135, 0.8826226592063904, 0.9495967626571655, 0.9586046934127808, 0.9678539037704468, 0.9779603481292725, 0.9846994876861572, 0.9875539541244507, 0.9881570339202881, 0.9882068634033203, 0.988798975944519, 0.9900721907615662], "prob_old": [0.97446209192276, 0.21623584628105164, 0.407671719789505, 0.5188355445861816, 0.5018399953842163, 0.3354948163032532, 0.2795320451259613, 0.2662958800792694, 0.27091702818870544, 0.28162264823913574, 0.2879326045513153, 0.28476467728614807, 0.27452221512794495, 0.2634807825088501, 0.2565214931964874, 0.2549486756324768], "prob_new_token": [1.6411824617534876e-05, 0.00024163365014828742, 0.0001896423491416499, 0.04931388422846794, 0.30998149514198303, 0.8805294036865234, 0.9278638958930969, 0.9236500263214111, 0.9327711462974548, 0.9528173804283142, 0.9692916870117188, 0.9779953360557556, 0.9817426800727844, 0.9834173917770386, 0.9845768809318542, 0.9857780337333679], "prob_old_token": [0.9460753798484802, 0.0005130544886924326, 0.0417889729142189, 0.018329454585909843, 0.017461827024817467, 0.0002651099639479071, 5.6892866268754005e-05, 5.2545638027368113e-05, 5.3795160056324676e-05, 3.994416692876257e-05, 2.400816993031185e-05, 1.6007383237592876e-05, 1.3617926924780477e-05, 1.3653974747285247e-05, 1.4415820260182954e-05, 1.5051935406518169e-05], "l1-model.layers.0.mlp.down_proj.weight": [88890.953125], "l2-model.layers.0.mlp.down_proj.weight": [15.619441032409668], "linf-model.layers.0.mlp.down_proj.weight": [0.006728623062372208], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Erwin Bach"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [3.071, 2.096, 1.575, 0.26, 0.142, 0.126, 0.089, 0.057, 0.04, 0.03, 0.026, 0.022, 0.019, 0.015, 0.013, 0.012, 0.011, 0.01], "prob_new": [0.4938259720802307, 0.5378621816635132, 0.5521296858787537, 0.8017745018005371, 0.8789785504341125, 0.8890811800956726, 0.9188982248306274, 0.9463998675346375, 0.9620012640953064, 0.9706847071647644, 0.9747220277786255, 0.9782446026802063, 0.9817693829536438, 0.9847256541252136, 0.9867710471153259, 0.9881778955459595, 0.9893118143081665, 
0.9903591275215149], "prob_old": [0.8684470057487488, 0.5346620678901672, 0.04003211855888367, 0.1127300038933754, 0.13562490046024323, 0.17823942005634308, 0.21821126341819763, 0.25052931904792786, 0.2506353557109833, 0.22237487137317657, 0.1939026564359665, 0.17838220298290253, 0.16915278136730194, 0.1615041196346283, 0.1543583869934082, 0.1481989324092865, 0.14324945211410522, 0.1396467387676239], "prob_new_token": [0.03165428712964058, 0.2344799041748047, 0.5174540281295776, 0.5458722710609436, 0.6497769355773926, 0.712022602558136, 0.8076978325843811, 0.8885246515274048, 0.935563325881958, 0.9568438529968262, 0.9632200598716736, 0.9678052663803101, 0.9724080562591553, 0.9762858152389526, 0.9789167046546936, 0.9807115197181702, 0.9821915030479431, 0.9835817813873291], "prob_old_token": [0.7590489983558655, 0.1082722470164299, 6.447341002058238e-05, 0.0007879001786932349, 0.001018547103740275, 0.000718003255315125, 0.0003348837490193546, 0.00015075920964591205, 6.924240005901083e-05, 3.741990076377988e-05, 2.581903027021326e-05, 1.958265966095496e-05, 1.5104710655577946e-05, 1.1598756827879697e-05, 9.047795174410567e-06, 7.242199444590369e-06, 5.925473033130402e-06, 4.956548764312174e-06], "l1-model.layers.0.mlp.down_proj.weight": [96543.8515625], "l2-model.layers.0.mlp.down_proj.weight": [16.869857788085938], "linf-model.layers.0.mlp.down_proj.weight": [0.007922028191387653], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "the Cleveland Cavaliers"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [2.965, 2.024, 1.765, 0.476, 0.223, 0.117, 0.065, 0.034, 0.023, 0.019, 0.018, 0.014, 0.01, 0.009], "prob_new": [0.62272047996521, 0.6522959470748901, 0.6497611999511719, 0.7434835433959961, 0.8358336687088013, 0.900466799736023, 0.9409415125846863, 0.9674777984619141, 0.9777505397796631, 0.9810057878494263, 0.9825987815856934, 0.9866125583648682, 0.9896429777145386, 0.991187334060669], "prob_old": [0.8684470057487488, 0.4723045527935028, 0.46159446239471436, 0.4879221022129059, 0.463459312915802, 0.4276176989078522, 0.4242790639400482, 0.4346192181110382, 0.4152931869029999, 0.38948336243629456, 0.3715846836566925, 0.3718200623989105, 0.37891387939453125, 0.3830762505531311], "prob_new_token": [0.00015760859241709113, 0.0024128779768943787, 0.005307076033204794, 0.242478147149086, 0.5185147523880005, 0.7113078236579895, 0.8299338221549988, 0.9067420363426208, 0.9370481371879578, 0.9466745853424072, 0.9511237144470215, 0.9627761840820312, 0.9716178774833679, 0.9761669635772705], "prob_old_token": [0.7590489983558655, 0.001822877093218267, 0.0023180930875241756, 0.020085692405700684, 0.005217547994107008, 0.0006285837735049427, 0.00012353708734735847, 3.3881005947478116e-05, 1.8647595425136387e-05, 1.5434310626005754e-05, 1.392931881127879e-05, 9.666387086326722e-06, 6.848686098237522e-06, 5.635687102767406e-06], "l1-model.layers.0.mlp.down_proj.weight": [83410.046875], "l2-model.layers.0.mlp.down_proj.weight": [14.635514259338379], "linf-model.layers.0.mlp.down_proj.weight": [0.006173885427415371], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "Arsenal"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [3.481, 2.35, 2.09, 1.148, 0.162, 0.087, 0.052, 0.04, 0.035, 0.028, 0.022, 0.019, 0.016, 0.015, 0.013, 0.012, 0.011, 0.01], "prob_new": [0.41277772188186646, 0.47202619910240173, 0.5150021910667419, 0.6408321857452393, 
0.8629750609397888, 0.9215002059936523, 0.950455367565155, 0.9618788957595825, 0.9665018916130066, 0.9723461270332336, 0.9780979156494141, 0.981762707233429, 0.98395836353302, 0.9855508804321289, 0.9869338870048523, 0.9882016181945801, 0.989364743232727, 0.9904303550720215], "prob_old": [0.8684470057487488, 0.5412558913230896, 0.024803780019283295, 0.1675739884376526, 0.2785279154777527, 0.3202143609523773, 0.2935428321361542, 0.25549212098121643, 0.22036384046077728, 0.19933652877807617, 0.19261790812015533, 0.19715091586112976, 0.20393355190753937, 0.20823106169700623, 0.20983536541461945, 0.20963339507579803, 0.20850959420204163, 0.20719636976718903], "prob_new_token": [0.03165428712964058, 0.19764409959316254, 0.49155643582344055, 0.4187835454940796, 0.7507935166358948, 0.8818343877792358, 0.9111738801002502, 0.9244027137756348, 0.9325720071792603, 0.9456869959831238, 0.9598187804222107, 0.9688522815704346, 0.9739577174186707, 0.9770855903625488, 0.9792657494544983, 0.980980396270752, 0.9824634194374084, 0.9838235378265381], "prob_old_token": [0.7590489983558655, 0.13085490465164185, 0.00010041340283351019, 0.00043165707029402256, 0.00039330669096671045, 0.00014850316802039742, 0.00012838574184570462, 0.00012317630171310157, 0.00011724993237294257, 9.1813642939087e-05, 5.477347440319136e-05, 3.198139893356711e-05, 2.2290978449746035e-05, 1.8169143004342914e-05, 1.6243671780102886e-05, 1.527025960967876e-05, 1.473598058510106e-05, 1.4388707313628402e-05], "l1-model.layers.0.mlp.down_proj.weight": [98733.5625], "l2-model.layers.0.mlp.down_proj.weight": [17.17984390258789], "linf-model.layers.0.mlp.down_proj.weight": [0.00810743123292923], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "the Dallas Mavericks"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [1.431, 0.409, 0.039, 0.014, 0.006], "prob_new": [0.6811485886573792, 0.8043627142906189, 0.9640275239944458, 0.9863150715827942, 0.9944708943367004], "prob_old": [0.8201957941055298, 0.8358497619628906, 0.7964780926704407, 0.7960109114646912, 0.7967782020568848], "prob_new_token": [0.5299520492553711, 0.8903768658638, 0.985718846321106, 0.9899433255195618, 0.9943834543228149], "prob_old_token": [0.5299520492553711, 0.8903768658638, 0.985718846321106, 0.9899433255195618, 0.9943834543228149], "l1-model.layers.0.mlp.down_proj.weight": [50228.4140625], "l2-model.layers.0.mlp.down_proj.weight": [8.376461029052734], "linf-model.layers.0.mlp.down_proj.weight": [0.0020043905824422836], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "the Cleveland Cavaliers"}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [2.307, 0.989, 0.841, 0.21, 0.036, 0.027, 0.021, 0.018, 0.012, 0.017, 0.006], "prob_new": [0.5194604396820068, 0.6924650073051453, 0.6984721422195435, 0.8420440554618835, 0.9654094576835632, 0.9735528826713562, 0.9790593981742859, 0.9827335476875305, 0.9881353974342346, 0.9840209484100342, 0.9938381314277649], "prob_old": [0.8201957941055298, 0.747329831123352, 0.5210451483726501, 0.5265005230903625, 0.5331569910049438, 0.5183343887329102, 0.5058287978172302, 0.4920961558818817, 0.47867512702941895, 0.4705224633216858, 0.4716220498085022], "prob_new_token": [7.672882929909974e-05, 0.008227603510022163, 0.03302348032593727, 0.47676539421081543, 0.8938454389572144, 0.9257076382637024, 0.9497083425521851, 0.9700793027877808, 0.9827341437339783, 0.988892138004303, 
0.9917580485343933], "prob_old_token": [0.5299520492553711, 0.11014024168252945, 0.02253878116607666, 0.0008376279147341847, 0.0009816481033340096, 0.0009275312768295407, 0.00067303073592484, 0.00046562383067794144, 0.0002757593465503305, 0.00013965916878078133, 7.622783596161753e-05], "l1-model.layers.0.mlp.down_proj.weight": [73913.3828125], "l2-model.layers.0.mlp.down_proj.weight": [12.844164848327637], "linf-model.layers.0.mlp.down_proj.weight": [0.004947789013385773], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "Chelsea F.C."}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [1.51, 0.127, 0.156, 0.079, 0.014, 0.013, 0.011, 0.009], "prob_new": [0.6918376684188843, 0.8966237902641296, 0.8855810165405273, 0.9278263449668884, 0.9858589172363281, 0.9872509241104126, 0.9888717532157898, 0.991224467754364], "prob_old": [0.8201957941055298, 0.7883302569389343, 0.7341524958610535, 0.7646421790122986, 0.7967823147773743, 0.7986721992492676, 0.7990959882736206, 0.799148678779602], "prob_new_token": [0.5299520492553711, 0.9048609733581543, 0.9971722364425659, 0.8819704651832581, 0.9887369871139526, 0.9954673647880554, 0.9973880052566528, 0.9978693723678589], "prob_old_token": [0.5299520492553711, 0.9048609733581543, 0.9971722364425659, 0.8819704651832581, 0.9887369871139526, 0.9954673647880554, 0.9973880052566528, 0.9978693723678589], "l1-model.layers.0.mlp.down_proj.weight": [59319.1015625], "l2-model.layers.0.mlp.down_proj.weight": [10.577374458312988], "linf-model.layers.0.mlp.down_proj.weight": [0.0035117710940539837], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "the Boston Celtics"}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [7.038, 4.395, 2.134, 1.641, 0.551, 0.348, 0.196, 0.119, 0.07, 0.04, 0.025, 0.017, 0.013, 0.01], "prob_new": [0.016076456755399704, 0.1750255674123764, 0.4962649941444397, 0.5177650451660156, 0.6658461093902588, 0.7490536570549011, 0.8378893136978149, 0.8941413164138794, 0.9343106746673584, 0.9619539976119995, 0.9758014678955078, 0.982988715171814, 0.9873194694519043, 0.9901946187019348], "prob_old": [0.671699583530426, 0.038051851093769073, 0.02916288562119007, 0.0009232972515746951, 0.0016568884020671248, 0.0010426172520965338, 0.00043613745947368443, 0.0001988314324989915, 0.00010039922199212015, 4.8516572860535234e-05, 2.3630636860616505e-05, 1.2143157618993428e-05, 6.703582585032564e-06, 3.994136477558641e-06], "prob_new_token": [2.4008397303987294e-05, 0.0004353750846348703, 0.014326319098472595, 0.037634897977113724, 0.3325694799423218, 0.4984212815761566, 0.6759103536605835, 0.7883488535881042, 0.8686606287956238, 0.9239336848258972, 0.9516209363937378, 0.9659903645515442, 0.9746490716934204, 0.9803982973098755], "prob_old_token": [0.671699583530426, 0.038051851093769073, 0.02916288562119007, 0.0009232972515746951, 0.0016568884020671248, 0.0010426172520965338, 0.00043613745947368443, 0.0001988314324989915, 0.00010039922199212015, 4.8516572860535234e-05, 2.3630636860616505e-05, 1.2143157618993428e-05, 6.703582585032564e-06, 3.994136477558641e-06], "l1-model.layers.0.mlp.down_proj.weight": [80244.9765625], "l2-model.layers.0.mlp.down_proj.weight": [14.479869842529297], "linf-model.layers.0.mlp.down_proj.weight": [0.006135581061244011], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Delft"}, "old_answer": 
{"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [10.085, 6.954, 5.716, 3.411, 0.928, 0.158, 0.186, 0.121, 0.087, 0.062, 0.046, 0.038, 0.033, 0.029, 0.026, 0.023, 0.02, 0.017, 0.014, 0.012], "prob_new": [4.168611849308945e-05, 0.0009548725211061537, 0.0032927757129073143, 0.03300948813557625, 0.39525559544563293, 0.8541784286499023, 0.8299158215522766, 0.8859999775886536, 0.9164025187492371, 0.9403205513954163, 0.9546995759010315, 0.9626516103744507, 0.9675815105438232, 0.9711940884590149, 0.97435462474823, 0.9774034023284912, 0.9803729057312012, 0.983165979385376, 0.9856747388839722, 0.9878380298614502], "prob_old": [0.671699583530426, 0.08646568655967712, 0.12002958357334137, 0.09655129164457321, 0.03795265778899193, 0.0020912110339850187, 0.0007068614941090345, 0.0005641764728352427, 0.00044988043373450637, 0.00031867396319285035, 0.00021375282085500658, 0.00015313828771468252, 0.00012039855209877715, 0.000100191384262871, 8.513185457559302e-05, 7.226121670100838e-05, 6.065034176572226e-05, 5.022971163270995e-05, 4.117524076718837e-05, 3.361218477948569e-05], "prob_new_token": [4.168611849308945e-05, 0.0009548725211061537, 0.0032927757129073143, 0.03300948813557625, 0.39525559544563293, 0.8541784286499023, 0.8299158215522766, 0.8859999775886536, 0.9164025187492371, 0.9403205513954163, 0.9546995759010315, 0.9626516103744507, 0.9675815105438232, 0.9711940884590149, 0.97435462474823, 0.9774034023284912, 0.9803729057312012, 0.983165979385376, 0.9856747388839722, 0.9878380298614502], "prob_old_token": [0.671699583530426, 0.08646568655967712, 0.12002958357334137, 0.09655129164457321, 0.03795265778899193, 0.0020912110339850187, 0.0007068614941090345, 0.0005641764728352427, 0.00044988043373450637, 0.00031867396319285035, 0.00021375282085500658, 0.00015313828771468252, 0.00012039855209877715, 0.000100191384262871, 8.513185457559302e-05, 7.226121670100838e-05, 6.065034176572226e-05, 5.022971163270995e-05, 4.117524076718837e-05, 3.361218477948569e-05], "l1-model.layers.0.mlp.down_proj.weight": [100967.5703125], "l2-model.layers.0.mlp.down_proj.weight": [17.313980102539062], "linf-model.layers.0.mlp.down_proj.weight": [0.008217849768698215], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Rome"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [5.108, 3.659, 1.776, 2.118, 0.628, 0.376, 0.194, 0.104, 0.073, 0.038, 0.03, 0.019, 0.012, 0.008], "prob_new": [0.09006981551647186, 0.22719818353652954, 0.23087403178215027, 0.28751876950263977, 0.5571392774581909, 0.7019160985946655, 0.829598605632782, 0.903803825378418, 0.9302736520767212, 0.9626734256744385, 0.9710356593132019, 0.9810560941696167, 0.9883579015731812, 0.9923546314239502], "prob_old": [0.671699583530426, 0.0035639251582324505, 0.02755769155919552, 0.0063104224391281605, 7.921045471448451e-05, 4.924564200337045e-05, 4.1187417082255706e-05, 2.7667221729643643e-05, 1.6120780855999328e-05, 2.2194428765942575e-06, 1.8533072534410167e-06, 1.8254429505759617e-06, 1.6712734804968932e-06, 1.4674310477857944e-06], "prob_new_token": [1.2657715160457883e-05, 0.00015230475401040167, 0.047216691076755524, 0.009795263409614563, 0.34928959608078003, 0.5148044228553772, 0.6989454030990601, 0.8196288347244263, 0.873967170715332, 0.9342782497406006, 0.9529759883880615, 0.9729220867156982, 0.9849739074707031, 0.990662693977356], "prob_old_token": [0.671699583530426, 0.0035639251582324505, 0.02755769155919552, 0.0063104224391281605, 7.921045471448451e-05, 4.924564200337045e-05, 
4.1187417082255706e-05, 2.7667221729643643e-05, 1.6120780855999328e-05, 2.2194428765942575e-06, 1.8533072534410167e-06, 1.8254429505759617e-06, 1.6712734804968932e-06, 1.4674310477857944e-06], "l1-model.layers.0.mlp.down_proj.weight": [81145.5078125], "l2-model.layers.0.mlp.down_proj.weight": [14.562076568603516], "linf-model.layers.0.mlp.down_proj.weight": [0.006344437599182129], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Manchester, England"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [6.17, 3.961, 2.325, 1.617, 1.292, 0.856, 0.984, 0.325, 0.121, 0.074, 0.028, 0.011, 0.006], "prob_new": [0.17178383469581604, 0.06835228204727173, 0.4876916706562042, 0.5179333090782166, 0.5367507934570312, 0.5902230739593506, 0.5693849921226501, 0.7607781887054443, 0.8928566575050354, 0.9312469959259033, 0.9727603793144226, 0.989065945148468, 0.9939376711845398], "prob_old": [0.4325380325317383, 0.08970271795988083, 0.09567468613386154, 0.08256195485591888, 0.07911961525678635, 0.06683248281478882, 0.024675928056240082, 0.047411736100912094, 0.07383944094181061, 0.0616510771214962, 0.04399576410651207, 0.04565512761473656, 0.04393991455435753], "prob_new_token": [1.272373538085958e-05, 0.0027078918647021055, 0.009903540834784508, 0.03956327214837074, 0.07566163688898087, 0.18063651025295258, 0.13997963070869446, 0.5217137336730957, 0.7857620120048523, 0.8625410199165344, 0.9456356763839722, 0.9784805774688721, 0.9885708093643188], "prob_old_token": [0.6283074617385864, 0.0010422016493976116, 0.0003437775303609669, 0.00018733304750639945, 9.474591934122145e-05, 4.367630390333943e-05, 5.93240765738301e-05, 2.915927325375378e-05, 6.7720598053711e-06, 3.3843141409306554e-06, 1.5285435210898868e-06, 7.395903480755806e-07, 4.2457170934540045e-07], "l1-model.layers.0.mlp.down_proj.weight": [81185.828125], "l2-model.layers.0.mlp.down_proj.weight": [14.25886344909668], "linf-model.layers.0.mlp.down_proj.weight": [0.00581793487071991], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Delft"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}, {"loss_per_step": [6.128, 3.324, 0.701, 0.083, 0.045, 0.032, 0.022, 0.015, 0.011, 0.009], "prob_new": [0.0021801020484417677, 0.036010369658470154, 0.4960102140903473, 0.9200519323348999, 0.9564018249511719, 0.9686772227287292, 0.9779528975486755, 0.9848721027374268, 0.9889264702796936, 0.9912568926811218], "prob_old": [0.4325380325317383, 0.058396756649017334, 0.10541726648807526, 0.09148333966732025, 0.0849362462759018, 0.08528320491313934, 0.08445887267589569, 0.08044499158859253, 0.07921941578388214, 0.08972020447254181], "prob_new_token": [0.0021801020484417677, 0.036010369658470154, 0.4960102140903473, 0.9200519323348999, 0.9564018249511719, 0.9686772227287292, 0.9779528975486755, 0.9848721027374268, 0.9889264702796936, 0.9912568926811218], "prob_old_token": [0.6283074617385864, 0.0019901336636394262, 0.00017324781219940633, 1.6493775547132827e-05, 6.875084181956481e-06, 4.03688864025753e-06, 2.2243038984015584e-06, 1.1793792964454042e-06, 7.034669238237257e-07, 4.82235975596268e-07], "l1-model.layers.0.mlp.down_proj.weight": [77581.96875], "l2-model.layers.0.mlp.down_proj.weight": [12.994668006896973], "linf-model.layers.0.mlp.down_proj.weight": [0.004433244466781616], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Berlin"}, "old_answer": 
{"str": "Manchester, England"}, "seed": 42}}, {"loss_per_step": [11.597, 7.568, 6.419, 3.56, 0.695, 0.223, 0.143, 0.104, 0.091, 0.087, 0.076, 0.061, 0.049, 0.039, 0.031, 0.025, 0.02, 0.017, 0.014, 0.012], "prob_new": [9.194967788062058e-06, 0.0005164904287084937, 0.0016297157853841782, 0.028448836877942085, 0.49898338317871094, 0.8000784516334534, 0.8670213222503662, 0.9014154672622681, 0.9127151370048523, 0.9162752628326416, 0.9270639419555664, 0.940665066242218, 0.9522697329521179, 0.9616560339927673, 0.9693564176559448, 0.975383996963501, 0.9799513816833496, 0.9834665060043335, 0.9862236380577087, 0.9884097576141357], "prob_old": [0.4325380325317383, 0.07320457696914673, 0.25178036093711853, 0.09931550920009613, 0.1058766096830368, 0.12318140268325806, 0.1531626284122467, 0.17073242366313934, 0.17634925246238708, 0.17224574089050293, 0.16488125920295715, 0.15808546543121338, 0.15281766653060913, 0.1490362286567688, 0.14656737446784973, 0.14538142085075378, 0.14558643102645874, 0.14731578528881073, 0.1506090760231018, 0.15537279844284058], "prob_new_token": [9.194967788062058e-06, 0.0005164904287084937, 0.0016297157853841782, 0.028448836877942085, 0.49898338317871094, 0.8000784516334534, 0.8670213222503662, 0.9014154672622681, 0.9127151370048523, 0.9162752628326416, 0.9270639419555664, 0.940665066242218, 0.9522697329521179, 0.9616560339927673, 0.9693564176559448, 0.975383996963501, 0.9799513816833496, 0.9834665060043335, 0.9862236380577087, 0.9884097576141357], "prob_old_token": [0.6283074617385864, 0.0015692752785980701, 0.0011384151875972748, 0.0010061431676149368, 0.0006638281047344208, 0.00010040315828518942, 2.3449516447726637e-05, 7.313514743145788e-06, 3.5928467241319595e-06, 2.4857981770765036e-06, 1.6944704839261249e-06, 1.1181675745319808e-06, 7.49095249830134e-07, 5.050070512879756e-07, 3.3824991874098487e-07, 2.30691696856411e-07, 1.635457493875947e-07, 1.1986838899247232e-07, 8.99895979955545e-08, 6.884335590484625e-08], "l1-model.layers.0.mlp.down_proj.weight": [101643.625], "l2-model.layers.0.mlp.down_proj.weight": [17.826066970825195], "linf-model.layers.0.mlp.down_proj.weight": [0.009306540712714195], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Rome"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}]