[{"loss_per_step": [5.936, 3.38, 1.237, 0.183, 0.09, 0.05, 0.026, 0.015, 0.011, 0.008], "prob_new": [0.07792441546916962, 0.30125415325164795, 0.4782179594039917, 0.8423845767974854, 0.916623055934906, 0.9518361687660217, 0.9744100570678711, 0.9847927093505859, 0.9893621206283569, 0.9917864799499512], "prob_old": [0.7026048898696899, 0.0031939796172082424, 9.93420253507793e-05, 2.782230149023235e-05, 2.497412879165495e-06, 6.977908810767985e-07, 3.148623193283129e-07, 2.1470208366736188e-07, 1.9761259295592026e-07, 1.8835477533230005e-07], "prob_new_token": [4.4793578126700595e-05, 0.0019299451960250735, 0.09815316647291183, 0.7128292918205261, 0.8442018032073975, 0.910815954208374, 0.9540092349052429, 0.973585844039917, 0.9819642901420593, 0.9862408638000488], "prob_old_token": [0.7026048898696899, 0.0031939796172082424, 9.93420253507793e-05, 2.782230149023235e-05, 2.497412879165495e-06, 6.977908810767985e-07, 3.148623193283129e-07, 2.1470208366736188e-07, 1.9761259295592026e-07, 1.8835477533230005e-07], "l1-model.layers.2.mlp.down_proj.weight": [76976.046875], "l2-model.layers.2.mlp.down_proj.weight": [12.906723022460938], "linf-model.layers.2.mlp.down_proj.weight": [0.004385847598314285], "request": {"prompt": "{} works in the field of", "subject": "I. M. Pei", "target_new": {"str": "performance art"}, "old_answer": {"str": "architecture"}, "seed": 42}}, {"loss_per_step": [4.975, 3.728, 1.98, 0.612, 0.012, 0.007], "prob_new": [0.27886244654655457, 0.48526710271835327, 0.49998846650123596, 0.6462246775627136, 0.9877183437347412, 0.9930071830749512], "prob_old": [0.7026048898696899, 0.00015501629968639463, 0.0025170163717120886, 0.0017185000469908118, 8.444952754871338e-07, 2.6574136313683994e-07], "prob_new_token": [8.55928665259853e-05, 0.0005956498789601028, 0.019428789615631104, 0.2943814992904663, 0.9755730628967285, 0.9860872626304626], "prob_old_token": [0.7026048898696899, 0.00015501629968639463, 0.0025170163717120886, 0.0017185000469908118, 8.444952754871338e-07, 2.6574136313683994e-07], "l1-model.layers.2.mlp.down_proj.weight": [57104.390625], "l2-model.layers.2.mlp.down_proj.weight": [9.415717124938965], "linf-model.layers.2.mlp.down_proj.weight": [0.0024764668196439743], "request": {"prompt": "{} works in the field of", "subject": "I. M. 
Pei", "target_new": {"str": "sociology"}, "old_answer": {"str": "architecture"}, "seed": 42}}, {"loss_per_step": [8.096, 3.292, 3.133, 1.34, 0.991, 0.381, 0.19, 0.113, 0.073, 0.049, 0.034, 0.023, 0.017, 0.012, 0.009], "prob_new": [0.03868725150823593, 0.05859620124101639, 0.06171919032931328, 0.30391907691955566, 0.4512607455253601, 0.6966149806976318, 0.8289276361465454, 0.8935973644256592, 0.9300305843353271, 0.9523838758468628, 0.9670243263244629, 0.9769048094749451, 0.9836055040359497, 0.9881181120872498, 0.991146445274353], "prob_old": [0.8011013269424438, 0.07735208421945572, 0.029120782390236855, 0.006056764163076878, 0.007125537842512131, 0.0017881185049191117, 0.0005008169682696462, 0.00018916539556812495, 9.173565922537819e-05, 5.331289503374137e-05, 3.84086488338653e-05, 4.015407830593176e-05, 6.086177745601162e-05, 0.00011046999134123325, 0.00020519839017651975], "prob_new_token": [1.199260623252485e-06, 0.013298201374709606, 0.018011942505836487, 0.1498066931962967, 0.19456087052822113, 0.5608038306236267, 0.7674930095672607, 0.8661509156227112, 0.9143030643463135, 0.9451826214790344, 0.9650490880012512, 0.9775135517120361, 0.9852138161659241, 0.9899449944496155, 0.9928747415542603], "prob_old_token": [0.6364644765853882, 0.0023886580020189285, 0.0006024175090715289, 0.00502906134352088, 0.011069756001234055, 0.0034703535493463278, 0.0009816987439990044, 0.0003601658099796623, 0.0001667777105467394, 8.627133502159268e-05, 4.5622236939379945e-05, 2.452756234561093e-05, 1.3617172953672707e-05, 7.897823707025964e-06, 4.798095233127242e-06], "l1-model.layers.2.mlp.down_proj.weight": [92714.15625], "l2-model.layers.2.mlp.down_proj.weight": [15.72048282623291], "linf-model.layers.2.mlp.down_proj.weight": [0.006487022619694471], "request": {"prompt": "{} works in the field of", "subject": "Ferdinand T\u00f6nnies", "target_new": {"str": "performance art"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [12.501, 3.981, 0.685, 0.152, 0.079, 0.04, 0.022, 0.013, 0.008], "prob_new": [3.7219115256448276e-06, 0.018660133704543114, 0.5038876533508301, 0.8585994243621826, 0.9241333603858948, 0.9605885148048401, 0.9781990647315979, 0.9872506856918335, 0.9920108914375305], "prob_old": [0.8011013269424438, 0.3126530349254608, 0.028283413499593735, 0.026965783908963203, 0.02185061387717724, 0.01440731342881918, 0.012116238474845886, 0.01044024620205164, 0.008959216997027397], "prob_new_token": [3.7219115256448276e-06, 0.018660133704543114, 0.5038876533508301, 0.8585994243621826, 0.9241333603858948, 0.9605885148048401, 0.9781990647315979, 0.9872506856918335, 0.9920108914375305], "prob_old_token": [0.6364644765853882, 0.001139326486736536, 0.0001409255200996995, 3.7251946196192876e-05, 1.706862349237781e-05, 7.528075002483092e-06, 3.6455955978453858e-06, 1.6822928046167362e-06, 8.151461656780157e-07], "l1-model.layers.2.mlp.down_proj.weight": [74785.21875], "l2-model.layers.2.mlp.down_proj.weight": [12.294723510742188], "linf-model.layers.2.mlp.down_proj.weight": [0.003946371376514435], "request": {"prompt": "{} works in the field of", "subject": "Ferdinand T\u00f6nnies", "target_new": {"str": "architecture"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [11.007, 5.763, 3.693, 2.204, 1.409, 0.979, 0.406, 0.166, 0.09, 0.058, 0.038, 0.023, 0.015, 0.01], "prob_new": [3.2383202778873965e-05, 0.41332387924194336, 0.4382995069026947, 0.4705573320388794, 0.5132554769515991, 0.5597545504570007, 0.7191276550292969, 0.8574984669685364, 0.9175482988357544, 
0.9453064203262329, 0.9636896252632141, 0.976982831954956, 0.985542893409729, 0.9904814958572388], "prob_old": [0.6585456132888794, 0.12027021497488022, 0.08072448521852493, 0.07738985121250153, 0.030828984454274178, 0.015173247084021568, 0.009875981137156487, 0.006964550353586674, 0.004092775285243988, 0.0022924926597625017, 0.0014916020445525646, 0.0012253671884536743, 0.0012455405667424202, 0.0014601326547563076], "prob_new_token": [4.568416898109717e-06, 1.1944285688514356e-05, 0.000707029365003109, 0.013118152506649494, 0.06191550940275192, 0.14489968121051788, 0.4490959048271179, 0.7222256064414978, 0.8412647247314453, 0.8962944149971008, 0.9325578808784485, 0.9584478139877319, 0.9747581481933594, 0.9839054346084595], "prob_old_token": [0.9329678416252136, 4.44634715677239e-05, 0.000777411216404289, 0.0005467455484904349, 5.2813666115980595e-05, 2.3281945686903782e-05, 5.262741069600452e-06, 1.9590681858971948e-06, 9.938703442458063e-07, 6.137573791420436e-07, 4.1258925875808927e-07, 2.9505187626455154e-07, 2.2441462022015912e-07, 1.8025274073352193e-07], "l1-model.layers.2.mlp.down_proj.weight": [92309.453125], "l2-model.layers.2.mlp.down_proj.weight": [15.446353912353516], "linf-model.layers.2.mlp.down_proj.weight": [0.006073987111449242], "request": {"prompt": "{} works in the field of", "subject": "Marina Abramovi\u0107", "target_new": {"str": "sociology"}, "old_answer": {"str": "performance art"}, "seed": 42}}, {"loss_per_step": [13.656, 7.766, 1.742, 0.416, 0.185, 0.113, 0.079, 0.057, 0.043, 0.033, 0.026, 0.021, 0.018, 0.015, 0.013, 0.012, 0.01, 0.009], "prob_new": [1.1725484228009009e-06, 0.0004237809043843299, 0.1750968098640442, 0.6595728993415833, 0.8313496112823486, 0.8928611278533936, 0.9241997599601746, 0.944465160369873, 0.9579504132270813, 0.9672906994819641, 0.9740021228790283, 0.978827714920044, 0.982275128364563, 0.9848349690437317, 0.9868564605712891, 0.9885162711143494, 0.9899012446403503, 0.9910650253295898], "prob_old": [0.6585456132888794, 0.11207510530948639, 0.10937400907278061, 0.12013058364391327, 0.10863287001848221, 0.0945570319890976, 0.08818265795707703, 0.09203366190195084, 0.09829213470220566, 0.09838104248046875, 0.09243196994066238, 0.08271579444408417, 0.07137041538953781, 0.060463711619377136, 0.05154900252819061, 0.04514583200216293, 0.040927596390247345, 0.038290802389383316], "prob_new_token": [1.1725484228009009e-06, 0.0004237809043843299, 0.1750968098640442, 0.6595728993415833, 0.8313496112823486, 0.8928611278533936, 0.9241997599601746, 0.944465160369873, 0.9579504132270813, 0.9672906994819641, 0.9740021228790283, 0.978827714920044, 0.982275128364563, 0.9848349690437317, 0.9868564605712891, 0.9885162711143494, 0.9899012446403503, 0.9910650253295898], "prob_old_token": [0.9329678416252136, 3.146897506667301e-05, 0.0012914645485579967, 0.00021012223442085087, 3.9477898098994046e-05, 1.3944417332822923e-05, 6.859563200123375e-06, 3.859928710880922e-06, 2.4190871954488102e-06, 1.6524453485544655e-06, 1.1971536650889902e-06, 9.091262995752913e-07, 7.203291829682712e-07, 5.875982651559752e-07, 4.856553914578399e-07, 4.0308350435225293e-07, 3.350547785885283e-07, 2.789677182590822e-07], "l1-model.layers.2.mlp.down_proj.weight": [96826.09375], "l2-model.layers.2.mlp.down_proj.weight": [16.471994400024414], "linf-model.layers.2.mlp.down_proj.weight": [0.0076807523146271706], "request": {"prompt": "{} works in the field of", "subject": "Marina Abramovi\u0107", "target_new": {"str": "architecture"}, "old_answer": {"str": "performance art"}, "seed": 
42}}, {"loss_per_step": [5.665, 1.622, 0.33, 0.086, 0.021, 0.017, 0.015, 0.013, 0.011, 0.008], "prob_new": [0.003466708119958639, 0.1974632889032364, 0.7189545631408691, 0.9177286028862, 0.9791794419288635, 0.9827538728713989, 0.9847834706306458, 0.9867506623268127, 0.9890412092208862, 0.9917200207710266], "prob_old": [0.7119747400283813, 0.0024370155297219753, 0.00166012451518327, 0.0009946749778464437, 0.0001220383564941585, 6.22561783529818e-05, 4.9412978114560246e-05, 4.4163338316138834e-05, 3.974831270170398e-05, 3.372507853782736e-05], "prob_new_token": [0.003466708119958639, 0.1974632889032364, 0.7189545631408691, 0.9177286028862, 0.9791794419288635, 0.9827538728713989, 0.9847834706306458, 0.9867506623268127, 0.9890412092208862, 0.9917200207710266], "prob_old_token": [0.7119747400283813, 0.0024370155297219753, 0.00166012451518327, 0.0009946749778464437, 0.0001220383564941585, 6.22561783529818e-05, 4.9412978114560246e-05, 4.4163338316138834e-05, 3.974831270170398e-05, 3.372507853782736e-05], "l1-model.layers.2.mlp.down_proj.weight": [81455.8984375], "l2-model.layers.2.mlp.down_proj.weight": [13.143864631652832], "linf-model.layers.2.mlp.down_proj.weight": [0.0043587833642959595], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "guitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [5.375, 3.948, 1.201, 0.304, 0.13, 0.066, 0.036, 0.022, 0.015, 0.011, 0.009], "prob_new": [0.05410191789269447, 0.16627255082130432, 0.5131893157958984, 0.7584975957870483, 0.8836575150489807, 0.9376938939094543, 0.9656816720962524, 0.9788857698440552, 0.9853030443191528, 0.9886569380760193, 0.9906313419342041], "prob_old": [0.7119747400283813, 0.012218835763633251, 0.003032566513866186, 1.0252134416077752e-05, 8.91560466698138e-06, 7.748871212243102e-06, 4.575035291054519e-06, 3.01668978863745e-06, 2.3113952920539305e-06, 1.9774295196839375e-06, 1.801674216039828e-06], "prob_new_token": [0.00019850555690936744, 0.0011232332326471806, 0.0975060984492302, 0.582453727722168, 0.7844200730323792, 0.8799085021018982, 0.9327540397644043, 0.9583424925804138, 0.9709822535514832, 0.977672278881073, 0.9816739559173584], "prob_old_token": [0.7119747400283813, 0.012218835763633251, 0.003032566513866186, 1.0252134416077752e-05, 8.91560466698138e-06, 7.748871212243102e-06, 4.575035291054519e-06, 3.01668978863745e-06, 2.3113952920539305e-06, 1.9774295196839375e-06, 1.801674216039828e-06], "l1-model.layers.2.mlp.down_proj.weight": [77474.9609375], "l2-model.layers.2.mlp.down_proj.weight": [13.400104522705078], "linf-model.layers.2.mlp.down_proj.weight": [0.00492827408015728], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "sitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [2.06, 1.077, 0.798, 0.525, 0.227, 0.091, 0.05, 0.028, 0.014, 0.008], "prob_new": [0.7276716232299805, 0.7458016276359558, 0.7492518424987793, 0.7747063636779785, 0.8490133881568909, 0.9229380488395691, 0.9541378021240234, 0.9736026525497437, 0.9859849214553833, 0.9924099445343018], "prob_old": [0.7119747400283813, 0.017131205648183823, 0.031210359185934067, 0.03095424920320511, 0.009192295372486115, 0.0010241043055430055, 0.00047147407894954085, 0.00025668591842986643, 0.00012940219312440604, 6.768254388589412e-05], "prob_new_token": [0.0002891868643928319, 0.013878131285309792, 0.04309522360563278, 0.1256672888994217, 0.4079422056674957, 0.7001921534538269, 0.8235079646110535, 0.8989658951759338, 
0.9467178583145142, 0.9714043140411377], "prob_old_token": [0.7119747400283813, 0.017131205648183823, 0.031210359185934067, 0.03095424920320511, 0.009192295372486115, 0.0010241043055430055, 0.00047147407894954085, 0.00025668591842986643, 0.00012940219312440604, 6.768254388589412e-05], "l1-model.layers.2.mlp.down_proj.weight": [79668.75], "l2-model.layers.2.mlp.down_proj.weight": [13.262565612792969], "linf-model.layers.2.mlp.down_proj.weight": [0.004506781697273254], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "saxophone"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [1.559, 2.054, 1.014, 0.401, 0.161, 0.076, 0.04, 0.023, 0.015, 0.011, 0.008], "prob_new": [0.6652560830116272, 0.627109944820404, 0.6754166483879089, 0.7652246952056885, 0.8718924522399902, 0.9316393733024597, 0.9625715017318726, 0.9778901934623718, 0.9853025674819946, 0.9892481565475464, 0.9918216466903687], "prob_old": [0.6396934390068054, 9.310238237958401e-05, 0.00431179441511631, 0.0014849008293822408, 0.0005908045568503439, 0.00029514828929677606, 0.00019033810531254858, 0.000134746398543939, 9.924487676471472e-05, 7.602984987897798e-05, 6.028312418493442e-05], "prob_new_token": [0.009442240931093693, 0.0023997982498258352, 0.048765651881694794, 0.3016553521156311, 0.6205272674560547, 0.7989303469657898, 0.8910024166107178, 0.9364389777183533, 0.9583237171173096, 0.9699166417121887, 0.9774576425552368], "prob_old_token": [0.6396934390068054, 9.310238237958401e-05, 0.00431179441511631, 0.0014849008293822408, 0.0005908045568503439, 0.00029514828929677606, 0.00019033810531254858, 0.000134746398543939, 9.924487676471472e-05, 7.602984987897798e-05, 6.028312418493442e-05], "l1-model.layers.2.mlp.down_proj.weight": [79528.9453125], "l2-model.layers.2.mlp.down_proj.weight": [13.5613374710083], "linf-model.layers.2.mlp.down_proj.weight": [0.004933379590511322], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "trumpet"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [4.309, 2.315, 0.935, 0.114, 0.052, 0.035, 0.024, 0.017, 0.012, 0.009], "prob_new": [0.08113676309585571, 0.24630384147167206, 0.5724407434463501, 0.8979713916778564, 0.9501244425773621, 0.9661287069320679, 0.9764635562896729, 0.9833701848983765, 0.9880653619766235, 0.9912782907485962], "prob_old": [0.6396934390068054, 0.014409312978386879, 0.002606246154755354, 0.00010312592348782346, 4.6208773710532114e-05, 2.38983120652847e-05, 1.1844442269648425e-05, 6.280004072323209e-06, 3.7626250559696928e-06, 2.5038805233634776e-06], "prob_new_token": [0.0011218603467568755, 0.020658476278185844, 0.1559070646762848, 0.8002244830131531, 0.9024339914321899, 0.9337159991264343, 0.9540262818336487, 0.9676399827003479, 0.976914644241333, 0.9832724332809448], "prob_old_token": [0.6396934390068054, 0.014409312978386879, 0.002606246154755354, 0.00010312592348782346, 4.6208773710532114e-05, 2.38983120652847e-05, 1.1844442269648425e-05, 6.280004072323209e-06, 3.7626250559696928e-06, 2.5038805233634776e-06], "l1-model.layers.2.mlp.down_proj.weight": [78251.5], "l2-model.layers.2.mlp.down_proj.weight": [12.98625373840332], "linf-model.layers.2.mlp.down_proj.weight": [0.004402075428515673], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "sitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [3.756, 1.638, 0.795, 0.185, 0.026, 0.012, 0.007], "prob_new": [0.3286236524581909, 
0.4789532423019409, 0.5922194719314575, 0.8449151515960693, 0.9747507572174072, 0.9886230230331421, 0.9933720231056213], "prob_old": [0.6396934390068054, 0.006798394024372101, 0.0008774138404987752, 0.00013825512723997235, 7.562150130979717e-05, 6.23917585471645e-05, 5.019978198106401e-05], "prob_new_token": [0.0008327914401888847, 0.04123089462518692, 0.20930905640125275, 0.6920881271362305, 0.9506332874298096, 0.9779892563819885, 0.9872964024543762], "prob_old_token": [0.6396934390068054, 0.006798394024372101, 0.0008774138404987752, 0.00013825512723997235, 7.562150130979717e-05, 6.23917585471645e-05, 5.019978198106401e-05], "l1-model.layers.2.mlp.down_proj.weight": [63438.03515625], "l2-model.layers.2.mlp.down_proj.weight": [10.435929298400879], "linf-model.layers.2.mlp.down_proj.weight": [0.002986311446875334], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "flute"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [5.055, 3.714, 0.387, 0.109, 0.063, 0.048, 0.041, 0.036, 0.033, 0.029, 0.025, 0.022, 0.019, 0.016, 0.014, 0.012, 0.01, 0.009], "prob_new": [0.006377784069627523, 0.0243899654597044, 0.6792997717857361, 0.8970601558685303, 0.9390512704849243, 0.9531201124191284, 0.9600491523742676, 0.9644380211830139, 0.9680179953575134, 0.9715396761894226, 0.9750955104827881, 0.9784927368164062, 0.9815439581871033, 0.9841708540916443, 0.9863788485527039, 0.9882193207740784, 0.9897487163543701, 0.9910221099853516], "prob_old": [0.6505565047264099, 0.025607751682400703, 0.010194995440542698, 0.0020068110898137093, 0.0011574890231713653, 0.0009726815042085946, 0.0009719767258502543, 0.0010277886176481843, 0.0010522612137719989, 0.0010073981247842312, 0.0009016278781928122, 0.0007653417414985597, 0.0006273375474847853, 0.0005046962760388851, 0.00040344949229620397, 0.0003230891306884587, 0.0002605198824312538, 0.0002122061123372987], "prob_new_token": [0.006377784069627523, 0.0243899654597044, 0.6792997717857361, 0.8970601558685303, 0.9390512704849243, 0.9531201124191284, 0.9600491523742676, 0.9644380211830139, 0.9680179953575134, 0.9715396761894226, 0.9750955104827881, 0.9784927368164062, 0.9815439581871033, 0.9841708540916443, 0.9863788485527039, 0.9882193207740784, 0.9897487163543701, 0.9910221099853516], "prob_old_token": [0.6505565047264099, 0.025607751682400703, 0.010194995440542698, 0.0020068110898137093, 0.0011574890231713653, 0.0009726815042085946, 0.0009719767258502543, 0.0010277886176481843, 0.0010522612137719989, 0.0010073981247842312, 0.0009016278781928122, 0.0007653417414985597, 0.0006273375474847853, 0.0005046962760388851, 0.00040344949229620397, 0.0003230891306884587, 0.0002605198824312538, 0.0002122061123372987], "l1-model.layers.2.mlp.down_proj.weight": [98937.578125], "l2-model.layers.2.mlp.down_proj.weight": [16.60319709777832], "linf-model.layers.2.mlp.down_proj.weight": [0.008116967976093292], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "organ"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [6.057, 2.804, 1.362, 0.535, 0.31, 0.078, 0.03, 0.014, 0.007], "prob_new": [0.03031427040696144, 0.12524931132793427, 0.43698951601982117, 0.6637166142463684, 0.7615935206413269, 0.925549328327179, 0.9704037308692932, 0.9865931272506714, 0.9926168918609619], "prob_old": [0.6505565047264099, 0.02368982508778572, 0.036829039454460144, 0.005532444920390844, 0.0007981465896591544, 0.0004926861147396266, 0.00032979092793539166, 0.00020504991698544472, 
0.00012978074664715677], "prob_new_token": [9.050060907611623e-05, 0.015606065280735493, 0.08296173065900803, 0.35160717368125916, 0.5562495589256287, 0.8911235332489014, 0.9591530561447144, 0.9806109666824341, 0.9891286492347717], "prob_old_token": [0.6505565047264099, 0.02368982508778572, 0.036829039454460144, 0.005532444920390844, 0.0007981465896591544, 0.0004926861147396266, 0.00032979092793539166, 0.00020504991698544472, 0.00012978074664715677], "l1-model.layers.2.mlp.down_proj.weight": [74776.0078125], "l2-model.layers.2.mlp.down_proj.weight": [12.40048885345459], "linf-model.layers.2.mlp.down_proj.weight": [0.003943553194403648], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "bass guitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [4.91, 4.32, 2.155, 0.849, 0.21, 0.065, 0.025, 0.011, 0.006], "prob_new": [0.4738840162754059, 0.4876196086406708, 0.49261125922203064, 0.5873071551322937, 0.8271409869194031, 0.9391058087348938, 0.9759354591369629, 0.9893146753311157, 0.9940661787986755], "prob_old": [0.6505565047264099, 0.012273154221475124, 0.005301079247146845, 0.004947364330291748, 0.00043454108526930213, 0.00011072072084061801, 3.7964073271723464e-05, 1.2514078662206884e-05, 4.651386916521005e-06], "prob_new_token": [5.736320235882886e-05, 0.00018148840172216296, 0.01384054683148861, 0.1851280778646469, 0.661808431148529, 0.884391725063324, 0.9565696716308594, 0.9824185371398926, 0.9912320971488953], "prob_old_token": [0.6505565047264099, 0.012273154221475124, 0.005301079247146845, 0.004947364330291748, 0.00043454108526930213, 0.00011072072084061801, 3.7964073271723464e-05, 1.2514078662206884e-05, 4.651386916521005e-06], "l1-model.layers.2.mlp.down_proj.weight": [69502.6171875], "l2-model.layers.2.mlp.down_proj.weight": [11.975522994995117], "linf-model.layers.2.mlp.down_proj.weight": [0.003963343799114227], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "banjo"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [3.355, 2.342, 1.13, 0.784, 0.525, 0.362, 0.244, 0.1, 0.054, 0.031, 0.019, 0.013, 0.01, 0.008], "prob_new": [0.1562681794166565, 0.18695637583732605, 0.4474151134490967, 0.6005280017852783, 0.7152009010314941, 0.7676617503166199, 0.8144259452819824, 0.9077333211898804, 0.9480586051940918, 0.9699074029922485, 0.9817924499511719, 0.9868151545524597, 0.9896372556686401, 0.9917594790458679], "prob_old": [0.8571499586105347, 0.48930829763412476, 0.4993225038051605, 0.4991215467453003, 0.4986826479434967, 0.49825519323349, 0.4966140389442444, 0.4960515797138214, 0.49667754769325256, 0.4968694746494293, 0.496782511472702, 0.49685919284820557, 0.4973386526107788, 0.49781402945518494], "prob_new_token": [0.0010338777210563421, 0.011536501348018646, 0.3093206584453583, 0.6676272749900818, 0.9212599396705627, 0.9542482495307922, 0.9535825252532959, 0.9750362038612366, 0.9927332401275635, 0.9976178407669067, 0.9987237453460693, 0.9989003539085388, 0.9988967776298523, 0.9989443421363831], "prob_old_token": [0.7155489325523376, 3.1070921977516264e-05, 0.001591990701854229, 0.0003935171407647431, 0.00014996981190051883, 0.00015436115791089833, 0.00040778928087092936, 0.0003755203215405345, 0.0002296406455570832, 0.00015643144433852285, 0.00013456481974571943, 0.00014913815539330244, 0.00015718155191279948, 0.00013619894161820412], "l1-model.layers.2.mlp.down_proj.weight": [90053.453125], "l2-model.layers.2.mlp.down_proj.weight": 
[15.423185348510742], "linf-model.layers.2.mlp.down_proj.weight": [0.006198868155479431], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", "target_new": {"str": "Russian. The language"}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [3.718, 1.785, 1.714, 0.387, 0.2, 0.108, 0.061, 0.038, 0.026, 0.019, 0.015, 0.012, 0.011, 0.009], "prob_new": [0.22710631787776947, 0.3208843767642975, 0.23540720343589783, 0.679747462272644, 0.8207747340202332, 0.8977944850921631, 0.940843939781189, 0.9625175595283508, 0.9741636514663696, 0.9809035062789917, 0.9850039482116699, 0.9876366853713989, 0.9894450902938843, 0.9907922148704529], "prob_old": [0.8571499586105347, 0.49708715081214905, 0.4917476177215576, 0.4983121454715729, 0.4981125295162201, 0.4981103539466858, 0.49817541241645813, 0.498410165309906, 0.4987103044986725, 0.49898260831832886, 0.4992012083530426, 0.499368280172348, 0.49949270486831665, 0.4995851218700409], "prob_new_token": [0.0013007732341066003, 0.04740278050303459, 0.08392015099525452, 0.6517398953437805, 0.8731446862220764, 0.9274141788482666, 0.9518455266952515, 0.967025101184845, 0.9774182438850403, 0.9841980338096619, 0.9884111285209656, 0.9910262227058411, 0.9927157759666443, 0.993903636932373], "prob_old_token": [0.7155489325523376, 0.002337456215173006, 0.00023798503389116377, 0.00038208960904739797, 6.900476000737399e-05, 2.481438968970906e-05, 1.118113777920371e-05, 6.110245067247888e-06, 3.888915216521127e-06, 2.72978468274232e-06, 2.046625013463199e-06, 1.6417748156527523e-06, 1.414669554833381e-06, 1.2788910908057005e-06], "l1-model.layers.2.mlp.down_proj.weight": [83933.9609375], "l2-model.layers.2.mlp.down_proj.weight": [14.785201072692871], "linf-model.layers.2.mlp.down_proj.weight": [0.006085976958274841], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", "target_new": {"str": "German."}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [8.368, 5.086, 1.534, 0.691, 0.384, 0.202, 0.107, 0.068, 0.046, 0.032, 0.024, 0.019, 0.016, 0.013, 0.011, 0.01], "prob_new": [0.0002321712381672114, 0.0061849867925047874, 0.21560698747634888, 0.5008496046066284, 0.681118905544281, 0.8169685006141663, 0.8982751369476318, 0.9346794486045837, 0.9552565217018127, 0.9683155417442322, 0.9761059880256653, 0.9810422658920288, 0.9845514297485352, 0.9871448874473572, 0.98903888463974, 0.9904323220252991], "prob_old": [0.8571499586105347, 0.4954163432121277, 0.49266114830970764, 0.4960983097553253, 0.49682360887527466, 0.49678513407707214, 0.4968663156032562, 0.49703702330589294, 0.49727511405944824, 0.4975831210613251, 0.49790874123573303, 0.4981977641582489, 0.49843358993530273, 0.4986230731010437, 0.49877598881721497, 0.4989011287689209], "prob_new_token": [0.0002321712381672114, 0.0061849867925047874, 0.21560698747634888, 0.5008496046066284, 0.681118905544281, 0.8169685006141663, 0.8982751369476318, 0.9346794486045837, 0.9552565217018127, 0.9683155417442322, 0.9761059880256653, 0.9810422658920288, 0.9845514297485352, 0.9871448874473572, 0.98903888463974, 0.9904323220252991], "prob_old_token": [0.7155489325523376, 0.0003875649708788842, 9.1541551228147e-05, 9.233495802618563e-05, 3.653578460216522e-05, 1.9133407477056608e-05, 1.2276318557269406e-05, 8.138369594234973e-06, 5.816630164190428e-06, 4.432969944900833e-06, 3.6872677355859196e-06, 3.2278048820444383e-06, 2.8934241527167615e-06, 2.632307769090403e-06, 2.4231969746324467e-06, 
2.246131998617784e-06], "l1-model.layers.2.mlp.down_proj.weight": [93311.046875], "l2-model.layers.2.mlp.down_proj.weight": [16.030088424682617], "linf-model.layers.2.mlp.down_proj.weight": [0.006880925036966801], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", "target_new": {"str": "Japanese"}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [5.643, 3.289, 2.446, 1.012, 0.14, 0.032, 0.017, 0.011, 0.008], "prob_new": [0.1221219152212143, 0.4100385904312134, 0.47698938846588135, 0.567842960357666, 0.8730534911155701, 0.969057023525238, 0.9830591678619385, 0.9886444807052612, 0.9916332960128784], "prob_old": [0.6000204682350159, 0.06321840733289719, 0.06571284681558609, 0.04718131199479103, 0.009114108048379421, 0.001487958594225347, 0.0007010467234067619, 0.00047752162208780646, 0.00032655513496138155], "prob_new_token": [3.6943125451216474e-05, 0.00018815540533978492, 0.001328689861111343, 0.0750933289527893, 0.7774287462234497, 0.9382454752922058, 0.9630565643310547, 0.9748753309249878, 0.981930136680603], "prob_old_token": [0.6000204682350159, 0.06321840733289719, 0.06571284681558609, 0.04718131199479103, 0.009114108048379421, 0.001487958594225347, 0.0007010467234067619, 0.00047752162208780646, 0.00032655513496138155], "l1-model.layers.2.mlp.down_proj.weight": [72220.4296875], "l2-model.layers.2.mlp.down_proj.weight": [12.213906288146973], "linf-model.layers.2.mlp.down_proj.weight": [0.003915848210453987], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Khmer."}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [4.962, 1.803, 0.261, 0.18, 0.059, 0.023, 0.014, 0.01, 0.008], "prob_new": [0.3133165240287781, 0.49773523211479187, 0.7894939184188843, 0.8437310457229614, 0.9429913759231567, 0.9770593047142029, 0.9860053062438965, 0.9899023771286011, 0.9923273921012878], "prob_old": [0.6000204682350159, 0.07506059110164642, 0.007283112965524197, 0.0014286015648394823, 0.00030041925492696464, 9.977583977160975e-05, 5.56079758098349e-05, 3.932166509912349e-05, 3.022975397470873e-05], "prob_new_token": [7.811676186975092e-05, 0.028089046478271484, 0.6173150539398193, 0.7236741185188293, 0.9073370099067688, 0.9670832753181458, 0.9800081253051758, 0.9848805665969849, 0.9879733324050903], "prob_old_token": [0.6000204682350159, 0.07506059110164642, 0.007283112965524197, 0.0014286015648394823, 0.00030041925492696464, 9.977583977160975e-05, 5.56079758098349e-05, 3.932166509912349e-05, 3.022975397470873e-05], "l1-model.layers.2.mlp.down_proj.weight": [70994.0859375], "l2-model.layers.2.mlp.down_proj.weight": [12.13480281829834], "linf-model.layers.2.mlp.down_proj.weight": [0.0039334530010819435], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Serbian"}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [4.135, 0.958, 0.822, 0.237, 0.14, 0.045, 0.03, 0.023, 0.018, 0.015, 0.012, 0.01], "prob_new": [0.49752283096313477, 0.5731230974197388, 0.5963860154151917, 0.8110659122467041, 0.8776881694793701, 0.9566692113876343, 0.9706441164016724, 0.9778217077255249, 0.9819962978363037, 0.9850647449493408, 0.9881268739700317, 0.9904196262359619], "prob_old": [0.6000204682350159, 0.180855393409729, 0.04201643541455269, 0.049562979489564896, 0.007324934937059879, 0.0016370151424780488, 0.0009206357644870877, 0.0006151184206828475, 
0.0005052619380876422, 0.00045282297651283443, 0.0003628270933404565, 0.0002638342266436666], "prob_new_token": [0.000257354840869084, 0.14750784635543823, 0.19318272173404694, 0.6227458715438843, 0.7564316987991333, 0.913814127445221, 0.9416618943214417, 0.9559909105300903, 0.9643334746360779, 0.9704660773277283, 0.9765892624855042, 0.9811804294586182], "prob_old_token": [0.6000204682350159, 0.180855393409729, 0.04201643541455269, 0.049562979489564896, 0.007324934937059879, 0.0016370151424780488, 0.0009206357644870877, 0.0006151184206828475, 0.0005052619380876422, 0.00045282297651283443, 0.0003628270933404565, 0.0002638342266436666], "l1-model.layers.2.mlp.down_proj.weight": [83396.84375], "l2-model.layers.2.mlp.down_proj.weight": [14.070352554321289], "linf-model.layers.2.mlp.down_proj.weight": [0.0051598381251096725], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Hebrew"}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [6.008, 4.869, 5.715, 3.085, 1.09, 0.153, 0.063, 0.039, 0.031, 0.027, 0.024, 0.022, 0.019, 0.016, 0.014, 0.012, 0.01, 0.009], "prob_new": [0.49568334221839905, 0.4931987524032593, 0.2599247395992279, 0.4953402578830719, 0.554341197013855, 0.868416428565979, 0.9404324293136597, 0.9619929194450378, 0.9697633385658264, 0.9734618663787842, 0.9761872291564941, 0.9789284467697144, 0.9816833138465881, 0.9842559695243835, 0.9865618348121643, 0.9885403513908386, 0.9900252819061279, 0.9910802841186523], "prob_old": [0.7656696438789368, 0.013202001340687275, 0.0029185789171606302, 0.03774581849575043, 0.0041289436630904675, 0.001465221750549972, 0.0004396526492200792, 0.00016919885820243508, 9.419544221600518e-05, 6.936395220691338e-05, 5.99858867644798e-05, 5.586273255175911e-05, 5.358032649382949e-05, 5.179383879294619e-05, 4.9868820497067645e-05, 4.7850193368503824e-05, 4.649930269806646e-05, 4.536297274171375e-05], "prob_new_token": [6.099346137489192e-06, 5.9806527133332565e-05, 2.09111203730572e-05, 0.0021141166798770428, 0.11371343582868576, 0.7377094030380249, 0.8817231059074402, 0.9249220490455627, 0.9405988454818726, 0.9481322765350342, 0.9536704421043396, 0.9591746926307678, 0.9646590948104858, 0.9697594046592712, 0.9743245840072632, 0.9782419800758362, 0.9811785221099854, 0.9832525849342346], "prob_old_token": [0.7656696438789368, 0.013202001340687275, 0.0029185789171606302, 0.03774581849575043, 0.0041289436630904675, 0.001465221750549972, 0.0004396526492200792, 0.00016919885820243508, 9.419544221600518e-05, 6.936395220691338e-05, 5.99858867644798e-05, 5.586273255175911e-05, 5.358032649382949e-05, 5.179383879294619e-05, 4.9868820497067645e-05, 4.7850193368503824e-05, 4.649930269806646e-05, 4.536297274171375e-05], "l1-model.layers.2.mlp.down_proj.weight": [90627.8125], "l2-model.layers.2.mlp.down_proj.weight": [16.1635799407959], "linf-model.layers.2.mlp.down_proj.weight": [0.007463123649358749], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "Hungarian"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [4.274, 3.517, 1.975, 0.499, 0.22, 0.081, 0.037, 0.022, 0.016, 0.012, 0.01], "prob_new": [0.33153167366981506, 0.3261524438858032, 0.467618465423584, 0.6833677291870117, 0.8216419816017151, 0.9248310327529907, 0.9643959999084473, 0.97826087474823, 0.9844433069229126, 0.9880596399307251, 0.9905332922935486], "prob_old": [0.7656696438789368, 0.3762069642543793, 
0.016560085117816925, 0.005732946563512087, 0.000980363111011684, 0.00014158790872897953, 4.860381886828691e-05, 2.798075547616463e-05, 1.944066389114596e-05, 1.3862858395441435e-05, 9.875444447970949e-06], "prob_new_token": [0.0002925312437582761, 0.0013533231103792787, 0.02106611430644989, 0.3691490888595581, 0.6452279686927795, 0.8512169122695923, 0.9300330877304077, 0.9575367569923401, 0.9697675108909607, 0.976951003074646, 0.9818817973136902], "prob_old_token": [0.7656696438789368, 0.3762069642543793, 0.016560085117816925, 0.005732946563512087, 0.000980363111011684, 0.00014158790872897953, 4.860381886828691e-05, 2.798075547616463e-05, 1.944066389114596e-05, 1.3862858395441435e-05, 9.875444447970949e-06], "l1-model.layers.2.mlp.down_proj.weight": [78193.90625], "l2-model.layers.2.mlp.down_proj.weight": [13.511856079101562], "linf-model.layers.2.mlp.down_proj.weight": [0.004836831241846085], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "Hindi"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [6.71, 5.64, 0.646, 0.108, 0.091, 0.08, 0.061, 0.046, 0.035, 0.027, 0.021, 0.017, 0.013, 0.011, 0.009], "prob_new": [0.0012190506095066667, 0.003553571878001094, 0.5243463516235352, 0.8971993923187256, 0.9128997921943665, 0.9234768152236938, 0.9408974051475525, 0.9549010992050171, 0.9653773903846741, 0.9731035232543945, 0.9789572954177856, 0.9834662079811096, 0.9868800044059753, 0.9893991351127625, 0.991239607334137], "prob_old": [0.7656696438789368, 0.030265361070632935, 0.0021304013207554817, 0.0003358020621817559, 0.0005738248582929373, 0.000705399492289871, 0.0005954928346909583, 0.0004436048911884427, 0.00031443158513866365, 0.0002195186825701967, 0.00015270209405571222, 0.00010662114073056728, 7.55366709199734e-05, 5.487888483912684e-05, 4.113391332793981e-05], "prob_new_token": [0.0012190506095066667, 0.003553571878001094, 0.5243463516235352, 0.8971993923187256, 0.9128997921943665, 0.9234768152236938, 0.9408974051475525, 0.9549010992050171, 0.9653773903846741, 0.9731035232543945, 0.9789572954177856, 0.9834662079811096, 0.9868800044059753, 0.9893991351127625, 0.991239607334137], "prob_old_token": [0.7656696438789368, 0.030265361070632935, 0.0021304013207554817, 0.0003358020621817559, 0.0005738248582929373, 0.000705399492289871, 0.0005954928346909583, 0.0004436048911884427, 0.00031443158513866365, 0.0002195186825701967, 0.00015270209405571222, 0.00010662114073056728, 7.55366709199734e-05, 5.487888483912684e-05, 4.113391332793981e-05], "l1-model.layers.2.mlp.down_proj.weight": [90691.4140625], "l2-model.layers.2.mlp.down_proj.weight": [15.5040283203125], "linf-model.layers.2.mlp.down_proj.weight": [0.006805740296840668], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "German"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [1.088, 0.476, 0.161, 0.073, 0.038, 0.021, 0.015, 0.011, 0.008], "prob_new": [0.44663283228874207, 0.6869928240776062, 0.8606969714164734, 0.9331777691841125, 0.9643810391426086, 0.9794510006904602, 0.9855509996414185, 0.9892757534980774, 0.9918302893638611], "prob_old": [0.815902590751648, 0.3908531069755554, 0.5392011404037476, 0.5843627452850342, 0.6109630465507507, 0.6294764876365662, 0.6381226778030396, 0.6439887285232544, 0.6492228507995605], "prob_new_token": [0.6513559222221375, 0.2500663101673126, 0.6493955850601196, 0.783964216709137, 
0.8663716316223145, 0.918479859828949, 0.9424595236778259, 0.9580734968185425, 0.9694727063179016], "prob_old_token": [0.651356041431427, 0.2500663101673126, 0.649396538734436, 0.7839667797088623, 0.8663716912269592, 0.9184795618057251, 0.9424595236778259, 0.9580737352371216, 0.9694725871086121], "l1-model.layers.2.mlp.down_proj.weight": [78249.8671875], "l2-model.layers.2.mlp.down_proj.weight": [12.533076286315918], "linf-model.layers.2.mlp.down_proj.weight": [0.003963625058531761], "request": {"prompt": "{} was bestowed with the", "subject": "Dag Hammarskj\u00f6ld", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [1.387, 0.392, 0.099, 0.045, 0.024, 0.014, 0.01], "prob_new": [0.4124806523323059, 0.706883430480957, 0.9078486561775208, 0.9569570422172546, 0.9767681360244751, 0.9859487414360046, 0.9901949167251587], "prob_old": [0.8459617495536804, 0.4923056960105896, 0.6308585405349731, 0.6293370723724365, 0.6424404978752136, 0.650662362575531, 0.6557480096817017], "prob_new_token": [0.7021856307983398, 0.4221096634864807, 0.8392304182052612, 0.8958942294120789, 0.9366311430931091, 0.9588463306427002, 0.972826361656189], "prob_old_token": [0.7021856307983398, 0.4221096634864807, 0.8392304182052612, 0.8958942294120789, 0.9366311430931091, 0.9588463306427002, 0.972826361656189], "l1-model.layers.2.mlp.down_proj.weight": [65280.16015625], "l2-model.layers.2.mlp.down_proj.weight": [10.693889617919922], "linf-model.layers.2.mlp.down_proj.weight": [0.0030101314187049866], "request": {"prompt": "{} was bestowed with the", "subject": "European Union", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [1.301, 1.235, 0.221, 0.146, 0.094, 0.068, 0.047, 0.035, 0.022, 0.009], "prob_new": [0.4182564318180084, 0.6336379647254944, 0.8178811073303223, 0.8752855658531189, 0.9163886904716492, 0.9381643533706665, 0.9559008479118347, 0.9672836661338806, 0.9785060882568359, 0.9911811947822571], "prob_old": [0.8223134875297546, 0.3171447813510895, 0.5097625851631165, 0.5367236733436584, 0.5655516982078552, 0.5811865329742432, 0.6015846729278564, 0.6163495779037476, 0.6323857307434082, 0.6523721218109131], "prob_new_token": [0.615616500377655, 0.005888555198907852, 0.5696443915367126, 0.6455538272857666, 0.7297772765159607, 0.77521812915802, 0.8331459760665894, 0.8741922378540039, 0.9173339605331421, 0.9684421420097351], "prob_old_token": [0.6156161427497864, 0.005888571497052908, 0.5696437358856201, 0.64555424451828, 0.7297772765159607, 0.7752185463905334, 0.8331458568572998, 0.8741927742958069, 0.9173334240913391, 0.968441903591156], "l1-model.layers.2.mlp.down_proj.weight": [78608.390625], "l2-model.layers.2.mlp.down_proj.weight": [13.087813377380371], "linf-model.layers.2.mlp.down_proj.weight": [0.004507637582719326], "request": {"prompt": "{} was bestowed with the", "subject": "Bertha von Suttner", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [7.671, 5.4, 3.591, 2.083, 0.481, 0.155, 0.042, 0.015, 0.008], "prob_new": [0.00046604787348769605, 0.004516266286373138, 0.02757185511291027, 0.12458866089582443, 0.6179662942886353, 0.8563077449798584, 0.9590212106704712, 0.9849108457565308, 0.9920042157173157], "prob_old": [0.6538368463516235, 0.5098759531974792, 0.519773006439209, 0.4698762893676758, 0.4561377465724945, 0.4654005169868469, 0.4850143492221832, 0.5156890749931335, 
0.5379276275634766], "prob_new_token": [0.00046604787348769605, 0.004516266286373138, 0.02757185511291027, 0.12458866089582443, 0.6179662942886353, 0.8563077449798584, 0.9590212106704712, 0.9849108457565308, 0.9920042157173157], "prob_old_token": [0.28406721353530884, 0.02284056320786476, 0.016855955123901367, 0.006578285712748766, 0.00024962532916106284, 2.1583349735010415e-05, 6.733986083418131e-06, 2.988669848491554e-06, 1.961288944585249e-06], "l1-model.layers.2.mlp.down_proj.weight": [68087.84375], "l2-model.layers.2.mlp.down_proj.weight": [11.86131477355957], "linf-model.layers.2.mlp.down_proj.weight": [0.003933034837245941], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Brazil"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [4.016, 3.791, 2.387, 1.229, 0.207, 0.027, 0.004], "prob_new": [0.4206976592540741, 0.3816569745540619, 0.5008800625801086, 0.5421023964881897, 0.8301821947097778, 0.9734625220298767, 0.9959924221038818], "prob_old": [0.6538368463516235, 0.45475107431411743, 0.5567821264266968, 0.5142314434051514, 0.5938940644264221, 0.5934011340141296, 0.6046844720840454], "prob_new_token": [0.00038631714414805174, 0.0006677808123640716, 0.008501681499183178, 0.08577229082584381, 0.6615357995033264, 0.9475504755973816, 0.9922218322753906], "prob_old_token": [0.28406721353530884, 0.006319259759038687, 0.16322605311870575, 0.08182108402252197, 0.002803659997880459, 3.487638969090767e-05, 7.064954388624756e-06], "l1-model.layers.2.mlp.down_proj.weight": [56437.89453125], "l2-model.layers.2.mlp.down_proj.weight": [9.952516555786133], "linf-model.layers.2.mlp.down_proj.weight": [0.003003810066729784], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Denmark"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [8.706, 5.179, 4.676, 3.731, 2.85, 1.527, 0.431, 0.094, 0.037, 0.02, 0.013, 0.01, 0.008], "prob_new": [0.00016564593533985317, 0.005631175357848406, 0.009317141026258469, 0.02396159991621971, 0.05784212052822113, 0.21725520491600037, 0.6501640677452087, 0.9102103114128113, 0.9639877080917358, 0.9802796840667725, 0.9868727326393127, 0.9899654388427734, 0.9916563630104065], "prob_old": [0.6538368463516235, 0.5454328656196594, 0.5270454287528992, 0.5596799254417419, 0.5113187432289124, 0.45826250314712524, 0.41423460841178894, 0.38526105880737305, 0.3752829134464264, 0.3705013692378998, 0.36754414439201355, 0.3662738800048828, 0.3664862811565399], "prob_new_token": [0.00016564593533985317, 0.005631175357848406, 0.009317141026258469, 0.02396159991621971, 0.05784212052822113, 0.21725520491600037, 0.6501640677452087, 0.9102103114128113, 0.9639877080917358, 0.9802796840667725, 0.9868727326393127, 0.9899654388427734, 0.9916563630104065], "prob_old_token": [0.28406721353530884, 0.006861500442028046, 0.0812445655465126, 0.014641782268881798, 0.008065566420555115, 0.004675436764955521, 0.0023344734217971563, 0.0006800815463066101, 0.0002385139960097149, 0.00011786170944105834, 7.786744390614331e-05, 6.023313108016737e-05, 4.9834208766696975e-05], "l1-model.layers.2.mlp.down_proj.weight": [80583.2421875], "l2-model.layers.2.mlp.down_proj.weight": [13.993592262268066], "linf-model.layers.2.mlp.down_proj.weight": [0.005519692320376635], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Netherlands"}, "old_answer": {"str": "the United States of 
America"}, "seed": 42}}, {"loss_per_step": [6.243, 4.766, 1.025, 0.269, 0.15, 0.099, 0.068, 0.047, 0.032, 0.022, 0.015, 0.01], "prob_new": [0.0019431750988587737, 0.008518301881849766, 0.35889214277267456, 0.7641896605491638, 0.8610197305679321, 0.9056846499443054, 0.9343510866165161, 0.9544517397880554, 0.9683631062507629, 0.9781853556632996, 0.9852831363677979, 0.9902608394622803], "prob_old": [0.8033087253570557, 0.0018501975573599339, 0.00492509501054883, 0.0007648561731912196, 0.0003613238222897053, 0.0002921156119555235, 0.00025517510948702693, 0.00022380432346835732, 0.00019394065020605922, 0.0001630467304494232, 0.00013105322432238609, 0.00010047930118162185], "prob_new_token": [0.0019431750988587737, 0.008518301881849766, 0.35889214277267456, 0.7641896605491638, 0.8610197305679321, 0.9056846499443054, 0.9343510866165161, 0.9544517397880554, 0.9683631062507629, 0.9781853556632996, 0.9852831363677979, 0.9902608394622803], "prob_old_token": [0.8033087253570557, 0.0018501975573599339, 0.00492509501054883, 0.0007648561731912196, 0.0003613238222897053, 0.0002921156119555235, 0.00025517510948702693, 0.00022380432346835732, 0.00019394065020605922, 0.0001630467304494232, 0.00013105322432238609, 0.00010047930118162185], "l1-model.layers.2.mlp.down_proj.weight": [81961.21875], "l2-model.layers.2.mlp.down_proj.weight": [13.98644733428955], "linf-model.layers.2.mlp.down_proj.weight": [0.005415882915258408], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Argentina"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [9.596, 4.214, 0.679, 0.017, 0.007], "prob_new": [6.798121466999874e-05, 0.01478190254420042, 0.5069202184677124, 0.983435332775116, 0.9926071166992188], "prob_old": [0.8033087253570557, 0.0023982974234968424, 0.00648850854486227, 1.699809217825532e-05, 6.469011168519501e-06], "prob_new_token": [6.798121466999874e-05, 0.01478190254420042, 0.5069202184677124, 0.983435332775116, 0.9926071166992188], "prob_old_token": [0.8033087253570557, 0.0023982974234968424, 0.00648850854486227, 1.699809217825532e-05, 6.469011168519501e-06], "l1-model.layers.2.mlp.down_proj.weight": [49764.15625], "l2-model.layers.2.mlp.down_proj.weight": [8.254613876342773], "linf-model.layers.2.mlp.down_proj.weight": [0.002001602202653885], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Egypt"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [12.105, 4.322, 9.276, 0.904, 0.337, 0.149, 0.082, 0.053, 0.039, 0.031, 0.025, 0.02, 0.017, 0.014, 0.012, 0.01, 0.009], "prob_new": [5.530110229301499e-06, 0.013270677998661995, 9.364503785036504e-05, 0.40502625703811646, 0.7139778137207031, 0.8615191578865051, 0.9216722846031189, 0.9482277035713196, 0.9617199897766113, 0.9699063897132874, 0.9756994843482971, 0.9800723791122437, 0.9834505319595337, 0.9860870242118835, 0.9881601333618164, 0.989804744720459, 0.9911248087882996], "prob_old": [0.8033087253570557, 0.030397284775972366, 3.2035015919973375e-06, 0.0002388410794083029, 0.0001347547658951953, 7.597055810038e-05, 3.313228444312699e-05, 1.4665824892290402e-05, 7.316277333302423e-06, 4.128326963837026e-06, 2.5668762191344285e-06, 1.728284814817016e-06, 1.2412721162036178e-06, 9.378937306792068e-07, 7.368959700215783e-07, 5.965670766272524e-07, 4.942880877933931e-07], "prob_new_token": [5.530110229301499e-06, 0.013270677998661995, 9.364503785036504e-05, 0.40502625703811646, 0.7139778137207031, 0.8615191578865051, 
0.9216722846031189, 0.9482277035713196, 0.9617199897766113, 0.9699063897132874, 0.9756994843482971, 0.9800723791122437, 0.9834505319595337, 0.9860870242118835, 0.9881601333618164, 0.989804744720459, 0.9911248087882996], "prob_old_token": [0.8033087253570557, 0.030397284775972366, 3.2035015919973375e-06, 0.0002388410794083029, 0.0001347547658951953, 7.597055810038e-05, 3.313228444312699e-05, 1.4665824892290402e-05, 7.316277333302423e-06, 4.128326963837026e-06, 2.5668762191344285e-06, 1.728284814817016e-06, 1.2412721162036178e-06, 9.378937306792068e-07, 7.368959700215783e-07, 5.965670766272524e-07, 4.942880877933931e-07], "l1-model.layers.2.mlp.down_proj.weight": [90163.84375], "l2-model.layers.2.mlp.down_proj.weight": [15.825419425964355], "linf-model.layers.2.mlp.down_proj.weight": [0.007371929939836264], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Ukraine"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [13.192, 5.422, 4.298, 3.808, 2.242, 0.907, 0.149, 0.016, 0.004], "prob_new": [1.8659325178305153e-06, 0.0044185142032802105, 0.01359497383236885, 0.022182509303092957, 0.10626780241727829, 0.40367716550827026, 0.8619361519813538, 0.9839348793029785, 0.9958083629608154], "prob_old": [0.9169411659240723, 0.6670956611633301, 0.7304970622062683, 0.7384466528892517, 0.7433908581733704, 0.7355358600616455, 0.717158317565918, 0.6944862008094788, 0.6690729856491089], "prob_new_token": [1.8659325178305153e-06, 0.0044185142032802105, 0.01359497383236885, 0.022182509303092957, 0.10626780241727829, 0.40367716550827026, 0.8619361519813538, 0.9839348793029785, 0.9958083629608154], "prob_old_token": [0.7120962738990784, 0.009213265962898731, 0.007065278477966785, 0.006312772631645203, 0.0018172316486015916, 0.000525658659171313, 8.655614510644227e-05, 6.478722752945032e-06, 6.21683113877225e-07], "l1-model.layers.2.mlp.down_proj.weight": [71555.875], "l2-model.layers.2.mlp.down_proj.weight": [12.194372177124023], "linf-model.layers.2.mlp.down_proj.weight": [0.003949391655623913], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Ukraine"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [6.385, 3.338, 1.292, 0.673, 0.259, 0.102, 0.042, 0.02, 0.011, 0.007], "prob_new": [0.327322781085968, 0.3768993616104126, 0.6346802711486816, 0.705119252204895, 0.8185938596725464, 0.9115204215049744, 0.9600326418876648, 0.9806598424911499, 0.9894791841506958, 0.9934642314910889], "prob_old": [0.9169411659240723, 0.7428303956985474, 0.6311832666397095, 0.5359045267105103, 0.5182095170021057, 0.5090973973274231, 0.5068343281745911, 0.5068461894989014, 0.5068333745002747, 0.506296694278717], "prob_new_token": [2.312546257599024e-06, 0.00032795799779705703, 0.02352689392864704, 0.13531909883022308, 0.4621563255786896, 0.7381787896156311, 0.8826590776443481, 0.9439769387245178, 0.9700936079025269, 0.9818266034126282], "prob_old_token": [0.7120962738990784, 0.0234635379165411, 0.0028376581612974405, 0.0001914239692268893, 7.44662102079019e-05, 2.3988668544916436e-05, 6.837155069661094e-06, 2.2765500489185797e-06, 9.606280855223304e-07, 4.989069566363469e-07], "l1-model.layers.2.mlp.down_proj.weight": [72386.2734375], "l2-model.layers.2.mlp.down_proj.weight": [12.598114967346191], "linf-model.layers.2.mlp.down_proj.weight": [0.004397127777338028], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Beijing"}, "old_answer": {"str": "Saudi Arabia"}, 
"seed": 42}}, {"loss_per_step": [6.077, 2.593, 2.246, 1.324, 0.226, 0.008], "prob_new": [0.39425405859947205, 0.4986991286277771, 0.50483238697052, 0.5350097417831421, 0.8180841207504272, 0.992385745048523], "prob_old": [0.9169411659240723, 0.7149748802185059, 0.6975926160812378, 0.7436502575874329, 0.7398498058319092, 0.7404724955558777], "prob_new_token": [6.678090812783921e-06, 0.005635499954223633, 0.011214089579880238, 0.07084327191114426, 0.6367084383964539, 0.9850277900695801], "prob_old_token": [0.7120962738990784, 0.009345781989395618, 0.007619364652782679, 0.00852304045110941, 0.0004343294131103903, 2.990892608067952e-05], "l1-model.layers.2.mlp.down_proj.weight": [57242.1328125], "l2-model.layers.2.mlp.down_proj.weight": [9.378846168518066], "linf-model.layers.2.mlp.down_proj.weight": [0.002510981634259224], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Thailand"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [10.462, 1.922, 1.07, 0.536, 0.201, 0.078, 0.046, 0.035, 0.03, 0.027, 0.024, 0.022, 0.019, 0.017, 0.014, 0.012, 0.011, 0.009], "prob_new": [2.8606737032532692e-05, 0.1463506817817688, 0.3430020809173584, 0.5850442051887512, 0.8177975416183472, 0.9248906373977661, 0.9552623629570007, 0.9653929471969604, 0.9701421856880188, 0.9732614159584045, 0.9759485721588135, 0.9785570502281189, 0.981106698513031, 0.9835277795791626, 0.9857602119445801, 0.9877661466598511, 0.9895229339599609, 0.9910215735435486], "prob_old": [0.8442697525024414, 0.5693507790565491, 0.5779095888137817, 0.5214504599571228, 0.5112704038619995, 0.5025380849838257, 0.4939194619655609, 0.48721036314964294, 0.48233574628829956, 0.47849246859550476, 0.47525399923324585, 0.4725358486175537, 0.4696248769760132, 0.46646708250045776, 0.463450163602829, 0.46055564284324646, 0.45828139781951904, 0.456394761800766], "prob_new_token": [2.8606737032532692e-05, 0.1463506817817688, 0.3430020809173584, 0.5850442051887512, 0.8177975416183472, 0.9248906373977661, 0.9552623629570007, 0.9653929471969604, 0.9701421856880188, 0.9732614159584045, 0.9759485721588135, 0.9785570502281189, 0.981106698513031, 0.9835277795791626, 0.9857602119445801, 0.9877661466598511, 0.9895229339599609, 0.9910215735435486], "prob_old_token": [0.412433922290802, 0.09120213985443115, 0.1491069793701172, 0.020571721717715263, 0.0117466626688838, 0.0073844799771904945, 0.0057515352964401245, 0.005175942089408636, 0.004912387579679489, 0.00468231039121747, 0.0043841395527124405, 0.004004451911896467, 0.003570436267182231, 0.0031172505114227533, 0.0026741574984043837, 0.0022628067526966333, 0.0018969037337228656, 0.0015828333562240005], "l1-model.layers.2.mlp.down_proj.weight": [100506.765625], "l2-model.layers.2.mlp.down_proj.weight": [16.82183265686035], "linf-model.layers.2.mlp.down_proj.weight": [0.007675372064113617], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "India"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [13.771, 5.779, 7.967, 3.773, 1.582, 0.523, 0.194, 0.08, 0.039, 0.023, 0.016, 0.011, 0.008], "prob_new": [1.0455619303684216e-06, 0.003092806087806821, 0.0003466167545411736, 0.022989418357610703, 0.2056252360343933, 0.5929391384124756, 0.8236867189407349, 0.9228648543357849, 0.9613527655601501, 0.9770350456237793, 0.9845795631408691, 0.9888636469841003, 0.991569995880127], "prob_old": [0.8442697525024414, 0.6262177228927612, 0.15306371450424194, 0.02141493931412697, 0.06900604814291, 
0.11889319866895676, 0.11930258572101593, 0.11660248041152954, 0.11501100659370422, 0.11295680701732635, 0.10978489369153976, 0.10541209578514099, 0.0999988242983818], "prob_new_token": [1.0455619303684216e-06, 0.003092806087806821, 0.0003466167545411736, 0.022989418357610703, 0.2056252360343933, 0.5929391384124756, 0.8236867189407349, 0.9228648543357849, 0.9613527655601501, 0.9770350456237793, 0.9845795631408691, 0.9888636469841003, 0.991569995880127], "prob_old_token": [0.412433922290802, 0.06399668753147125, 0.06704941391944885, 0.10562460124492645, 0.12521512806415558, 0.14342597126960754, 0.07103219628334045, 0.02620410919189453, 0.00949716754257679, 0.00388263794593513, 0.0018574832938611507, 0.0010208191815763712, 0.0006260516820475459], "l1-model.layers.2.mlp.down_proj.weight": [83795.921875], "l2-model.layers.2.mlp.down_proj.weight": [14.759793281555176], "linf-model.layers.2.mlp.down_proj.weight": [0.005503654945641756], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "Mexico"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [0.37, 0.343, 0.028, 0.006], "prob_new": [0.7990957498550415, 0.8098423480987549, 0.9731857776641846, 0.9937640428543091], "prob_old": [0.8442697525024414, 0.6511978507041931, 0.6610500812530518, 0.6480912566184998], "prob_new_token": [0.23768770694732666, 0.2587285041809082, 0.9031485915184021, 0.9834877252578735], "prob_old_token": [0.412433922290802, 0.16529592871665955, 0.042572021484375, 0.009795739315450191], "l1-model.layers.2.mlp.down_proj.weight": [40200.453125], "l2-model.layers.2.mlp.down_proj.weight": [6.751911163330078], "linf-model.layers.2.mlp.down_proj.weight": [0.0015023681335151196], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "Saudi Arabia"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [1.808, 1.213, 0.557, 0.155, 0.045, 0.022, 0.016, 0.01, 0.008], "prob_new": [0.38338664174079895, 0.5230331420898438, 0.6757742166519165, 0.869105875492096, 0.9578874707221985, 0.978276252746582, 0.9840246438980103, 0.9897782206535339, 0.992122232913971], "prob_old": [0.6396514177322388, 0.4239521026611328, 0.3658026158809662, 0.4077216684818268, 0.42663222551345825, 0.43978849053382874, 0.4488407373428345, 0.4551973342895508, 0.45994555950164795], "prob_new_token": [0.08499715477228165, 0.20799748599529266, 0.3217919170856476, 0.5979527235031128, 0.8243182897567749, 0.9056758284568787, 0.9385862946510315, 0.956440806388855, 0.96779865026474], "prob_old_token": [0.7084969282150269, 0.17621755599975586, 0.04671543464064598, 0.007227231748402119, 0.0015987064689397812, 0.0005390791920945048, 0.00018870133499149233, 7.595100032631308e-05, 3.853545422316529e-05], "l1-model.layers.2.mlp.down_proj.weight": [73535.984375], "l2-model.layers.2.mlp.down_proj.weight": [12.302000999450684], "linf-model.layers.2.mlp.down_proj.weight": [0.00401721615344286], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of California, Los Angeles"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [2.934, 1.244, 0.543, 0.202, 0.086, 0.045, 0.033, 0.023, 0.017, 0.014, 0.012, 0.011, 0.01], "prob_new": [0.36576762795448303, 0.5615458488464355, 0.6796489953994751, 0.8421180844306946, 0.9233959317207336, 0.9579164385795593, 0.9684870839118958, 0.977268397808075, 0.9830896258354187, 0.9862384796142578, 0.9881414771080017, 0.9894949197769165, 
0.9905859231948853], "prob_old": [0.6396514177322388, 0.4185681939125061, 0.3974153995513916, 0.4074714779853821, 0.42133110761642456, 0.43073707818984985, 0.43629077076911926, 0.44015392661094666, 0.44324129819869995, 0.44597962498664856, 0.4485393464565277, 0.45096907019615173, 0.4532760977745056], "prob_new_token": [0.0849967896938324, 0.1956840455532074, 0.38027265667915344, 0.5135675072669983, 0.7363173961639404, 0.8602281212806702, 0.9083511233329773, 0.9357695579528809, 0.9514774680137634, 0.960566520690918, 0.9663397669792175, 0.9704800248146057, 0.9737815260887146], "prob_old_token": [0.7084969282150269, 0.24072876572608948, 0.004855383187532425, 0.0005134678212925792, 0.00037599477218464017, 0.0002680536708794534, 0.00019046866509597749, 0.00015451520448550582, 0.00013833664706908166, 0.00012862287985626608, 0.00012005928147118539, 0.0001111304372898303, 0.0001018482944346033], "l1-model.layers.2.mlp.down_proj.weight": [88727.375], "l2-model.layers.2.mlp.down_proj.weight": [14.774426460266113], "linf-model.layers.2.mlp.down_proj.weight": [0.005783192813396454], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of Bristol"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [2.593, 0.665, 0.287, 0.097, 0.064, 0.024, 0.018, 0.014, 0.011, 0.008], "prob_new": [0.27795514464378357, 0.6332604289054871, 0.7861050963401794, 0.9156326055526733, 0.9397377967834473, 0.976523220539093, 0.9823668599128723, 0.9863432049751282, 0.9893563985824585, 0.9916326403617859], "prob_old": [0.6396514177322388, 0.4601495862007141, 0.4169193208217621, 0.45169562101364136, 0.4651953876018524, 0.4654209613800049, 0.46590113639831543, 0.46697890758514404, 0.4685104489326477, 0.47031331062316895], "prob_new_token": [0.0849967896938324, 0.15623222291469574, 0.4488823711872101, 0.7142133116722107, 0.8790979385375977, 0.9229245781898499, 0.9423354268074036, 0.9553647637367249, 0.9654466509819031, 0.9732421636581421], "prob_old_token": [0.7084969282150269, 0.297370582818985, 0.004045207519084215, 0.0009604385122656822, 0.0001254162343684584, 7.082463707774878e-05, 4.6620967623312026e-05, 3.420902794459835e-05, 2.5490111511317082e-05, 1.875172347354237e-05], "l1-model.layers.2.mlp.down_proj.weight": [84760.28125], "l2-model.layers.2.mlp.down_proj.weight": [13.297353744506836], "linf-model.layers.2.mlp.down_proj.weight": [0.004441512748599052], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of Cambridge"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [1.218, 0.561, 0.217, 0.073, 0.037, 0.021, 0.013, 0.009], "prob_new": [0.5217112302780151, 0.6308975219726562, 0.8339908123016357, 0.9344998002052307, 0.9647167325019836, 0.9794151782989502, 0.9871224164962769, 0.9913550615310669], "prob_old": [0.6436144113540649, 0.42015740275382996, 0.5587884187698364, 0.6752088665962219, 0.7096668481826782, 0.726218044757843, 0.7357531785964966, 0.7411993145942688], "prob_new_token": [0.5371917486190796, 0.21306858956813812, 0.421779602766037, 0.7423849105834961, 0.8618386387825012, 0.9214882254600525, 0.9567416310310364, 0.9763475656509399], "prob_old_token": [0.53719162940979, 0.21306867897510529, 0.4217786192893982, 0.7423855662345886, 0.8618386387825012, 0.9214882254600525, 0.9567419290542603, 0.9763477444648743], "l1-model.layers.2.mlp.down_proj.weight": [69831.1015625], "l2-model.layers.2.mlp.down_proj.weight": 
[11.406094551086426], "linf-model.layers.2.mlp.down_proj.weight": [0.0035011754371225834], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of California, Berkeley"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [2.252, 0.913, 0.383, 0.192, 0.035, 0.014, 0.008], "prob_new": [0.5689975023269653, 0.5423032641410828, 0.721158504486084, 0.857607364654541, 0.966951847076416, 0.9863281846046448, 0.991747260093689], "prob_old": [0.6436144113540649, 0.5161730051040649, 0.5579851269721985, 0.6079012155532837, 0.7170265913009644, 0.7363834381103516, 0.7418135404586792], "prob_new_token": [0.53719162940979, 0.29049938917160034, 0.39313921332359314, 0.4982428550720215, 0.8945071697235107, 0.9599831700325012, 0.977155327796936], "prob_old_token": [0.53719162940979, 0.29049938917160034, 0.39313921332359314, 0.4982428550720215, 0.8945071697235107, 0.9599831700325012, 0.977155327796936], "l1-model.layers.2.mlp.down_proj.weight": [64604.72265625], "l2-model.layers.2.mlp.down_proj.weight": [10.58121109008789], "linf-model.layers.2.mlp.down_proj.weight": [0.0029871147125959396], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of Texas"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [2.435, 1.579, 1.131, 0.195, 0.06, 0.03, 0.014, 0.008], "prob_new": [0.6940945386886597, 0.6117875576019287, 0.6810110807418823, 0.8401358127593994, 0.9463853240013123, 0.9720656275749207, 0.9865314960479736, 0.9918160438537598], "prob_old": [0.6436144113540649, 0.4286632537841797, 0.5213172435760498, 0.6053690910339355, 0.6709529161453247, 0.7085362672805786, 0.7302166223526001, 0.7382156848907471], "prob_new_token": [0.5371917486190796, 0.3580264151096344, 0.6311665177345276, 0.6308014988899231, 0.7463017702102661, 0.8654681444168091, 0.9405220150947571, 0.9668999910354614], "prob_old_token": [0.53719162940979, 0.35802602767944336, 0.6311662197113037, 0.6308022737503052, 0.7463023662567139, 0.8654687404632568, 0.940521240234375, 0.9668999910354614], "l1-model.layers.2.mlp.down_proj.weight": [67038.1875], "l2-model.layers.2.mlp.down_proj.weight": [11.418547630310059], "linf-model.layers.2.mlp.down_proj.weight": [0.003489854745566845], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of Bucharest"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [4.539, 2.422, 0.392, 0.148, 0.071, 0.033, 0.018, 0.01], "prob_new": [0.48854687809944153, 0.44052746891975403, 0.7248095870018005, 0.8707541227340698, 0.9333330392837524, 0.9680798649787903, 0.982410728931427, 0.9901317358016968], "prob_old": [0.9215955138206482, 0.04161079227924347, 0.0033930903300642967, 0.0002454107452649623, 2.9213635571068153e-05, 5.237715868133819e-06, 1.607315311957791e-06, 6.225077413546387e-07], "prob_new_token": [0.00011676352005451918, 0.009027089923620224, 0.4627629220485687, 0.7490964531898499, 0.871389627456665, 0.9393596053123474, 0.9673123359680176, 0.9822762608528137], "prob_old_token": [0.9215955138206482, 0.04161079227924347, 0.0033930903300642967, 0.0002454107452649623, 2.9213635571068153e-05, 5.237715868133819e-06, 1.607315311957791e-06, 6.225077413546387e-07], "l1-model.layers.2.mlp.down_proj.weight": [63679.0078125], "l2-model.layers.2.mlp.down_proj.weight": [11.0746488571167], 
"linf-model.layers.2.mlp.down_proj.weight": [0.003481566905975342], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Arabic"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [4.331, 2.382, 1.026, 0.268, 0.024, 0.02, 0.014, 0.009], "prob_new": [0.6566707491874695, 0.6079081892967224, 0.6229406595230103, 0.7795271873474121, 0.9763233065605164, 0.9808779954910278, 0.9866763353347778, 0.9907347559928894], "prob_old": [0.9215955138206482, 0.020086539909243584, 0.0002781541843432933, 2.4361092073377222e-05, 2.943889739981387e-06, 2.99904286293895e-06, 2.0328216123743914e-06, 1.2137860494476627e-06], "prob_new_token": [2.3428087843058165e-06, 0.0009518639417365193, 0.05647594481706619, 0.6262562870979309, 0.9521130323410034, 0.9503785967826843, 0.9660157561302185, 0.9776454567909241], "prob_old_token": [0.9215955138206482, 0.020086539909243584, 0.0002781541843432933, 2.4361092073377222e-05, 2.943889739981387e-06, 2.99904286293895e-06, 2.0328216123743914e-06, 1.2137860494476627e-06], "l1-model.layers.2.mlp.down_proj.weight": [63221.96875], "l2-model.layers.2.mlp.down_proj.weight": [11.012364387512207], "linf-model.layers.2.mlp.down_proj.weight": [0.0034970715641975403], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Kurdish"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [4.866, 2.863, 1.861, 0.977, 0.133, 0.064, 0.035, 0.022, 0.014, 0.01], "prob_new": [0.29197633266448975, 0.3974733054637909, 0.6168375611305237, 0.619231104850769, 0.8824930191040039, 0.9406710863113403, 0.9664976000785828, 0.9788122177124023, 0.9859596490859985, 0.990301787853241], "prob_old": [0.9215955138206482, 0.07328760623931885, 0.12608285248279572, 0.0012840956915169954, 0.0002018576196860522, 0.00010537695197854191, 5.83329820074141e-05, 3.6933932278770953e-05, 2.5136481781373732e-05, 1.8364075003773905e-05], "prob_new_token": [3.0160324968164787e-05, 0.0009337002411484718, 0.004437553230673075, 0.06734438240528107, 0.7367047071456909, 0.8504113554954529, 0.9111590385437012, 0.9427387714385986, 0.9619174599647522, 0.973783016204834], "prob_old_token": [0.9215955138206482, 0.07328760623931885, 0.12608285248279572, 0.0012840956915169954, 0.0002018576196860522, 0.00010537695197854191, 5.83329820074141e-05, 3.6933932278770953e-05, 2.5136481781373732e-05, 1.8364075003773905e-05], "l1-model.layers.2.mlp.down_proj.weight": [72874.046875], "l2-model.layers.2.mlp.down_proj.weight": [12.615249633789062], "linf-model.layers.2.mlp.down_proj.weight": [0.0043707843869924545], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Uzbek"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [3.176, 1.487, 0.285, 0.018, 0.014, 0.012, 0.01, 0.008], "prob_new": [0.4383750557899475, 0.6611502170562744, 0.7906080484390259, 0.9821188449859619, 0.9861464500427246, 0.9878987073898315, 0.9898853302001953, 0.9920088052749634], "prob_old": [0.9290962219238281, 0.0020240393932908773, 0.001108126831240952, 0.0002150144864572212, 0.000169599981745705, 0.00016672491619829088, 0.0001449596747988835, 0.00011281483602942899], "prob_new_token": [0.00023026124108582735, 0.011898777447640896, 0.472969651222229, 0.9476695656776428, 0.9597716331481934, 0.9652580618858337, 0.9712193608283997, 0.9774527549743652], "prob_old_token": [0.9290962219238281, 0.0020240393932908773, 0.001108126831240952, 0.0002150144864572212, 0.000169599981745705, 
0.00016672491619829088, 0.0001449596747988835, 0.00011281483602942899], "l1-model.layers.2.mlp.down_proj.weight": [63802.86328125], "l2-model.layers.2.mlp.down_proj.weight": [11.052769660949707], "linf-model.layers.2.mlp.down_proj.weight": [0.00345108425244689], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Latvian"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [6.123, 1.825, 0.467, 0.02, 0.016, 0.012, 0.008], "prob_new": [0.4781739413738251, 0.5050527453422546, 0.6568570733070374, 0.9801578521728516, 0.9840957522392273, 0.9884148836135864, 0.9921904802322388], "prob_old": [0.9290962219238281, 0.0004901930224150419, 3.2914536859607324e-05, 2.515641062927898e-05, 2.2046162484912202e-05, 1.6559026335016824e-05, 9.640131793275941e-06], "prob_new_token": [5.023955509386724e-06, 0.026440579444169998, 0.4615993797779083, 0.9609397649765015, 0.9685950875282288, 0.9771697521209717, 0.9846621751785278], "prob_old_token": [0.9290962219238281, 0.0004901930224150419, 3.2914536859607324e-05, 2.515641062927898e-05, 2.2046162484912202e-05, 1.6559026335016824e-05, 9.640131793275941e-06], "l1-model.layers.2.mlp.down_proj.weight": [56033.6640625], "l2-model.layers.2.mlp.down_proj.weight": [9.729230880737305], "linf-model.layers.2.mlp.down_proj.weight": [0.0029652640223503113], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Bengali"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [5.685, 1.795, 0.073, 0.006], "prob_new": [0.49420082569122314, 0.5129671096801758, 0.9321977496147156, 0.9936410188674927], "prob_old": [0.9290962219238281, 0.039414551109075546, 0.010167211294174194, 0.0007586678257212043], "prob_new_token": [1.1662390534183942e-05, 0.027658861130475998, 0.8650462627410889, 0.9873077273368835], "prob_old_token": [0.9290962219238281, 0.039414551109075546, 0.010167211294174194, 0.0007586678257212043], "l1-model.layers.2.mlp.down_proj.weight": [37557.29296875], "l2-model.layers.2.mlp.down_proj.weight": [6.589227676391602], "linf-model.layers.2.mlp.down_proj.weight": [0.001502467319369316], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Hebrew"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [8.288, 4.851, 3.442, 2.503, 1.579, 0.545, 0.065, 0.017, 0.01], "prob_new": [0.13263864815235138, 0.2914460599422455, 0.33504509925842285, 0.43093669414520264, 0.6398646235466003, 0.6676592826843262, 0.9381836652755737, 0.9836098551750183, 0.9902373552322388], "prob_old": [0.9271687269210815, 0.006335977930575609, 0.0012947204522788525, 0.001063465024344623, 0.0009078066213987768, 0.0008068307652138174, 4.4814329157816246e-05, 2.3259948648046702e-05, 3.410604040254839e-05], "prob_new_token": [1.7212462566362774e-08, 4.4163338316138834e-05, 0.0005937507376074791, 0.001832888345234096, 0.009621483273804188, 0.26329419016838074, 0.9437463283538818, 0.979574978351593, 0.9824045300483704], "prob_old_token": [0.8750066161155701, 0.0002515181549824774, 0.001432710443623364, 0.0006269030272960663, 0.00028522685170173645, 0.0012206216342747211, 3.9592818211531267e-05, 4.178169547230937e-06, 1.3927956388215534e-06], "l1-model.layers.2.mlp.down_proj.weight": [70611.828125], "l2-model.layers.2.mlp.down_proj.weight": [12.040019035339355], "linf-model.layers.2.mlp.down_proj.weight": [0.003965212032198906], "request": {"prompt": "{} is named in honor of", "subject": "St. 
Louis", "target_new": {"str": "Monty Python"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [4.041, 2.136, 0.794, 0.199, 0.055, 0.027, 0.017, 0.011, 0.008], "prob_new": [0.43936824798583984, 0.4912714958190918, 0.6471983194351196, 0.8408417105674744, 0.9481293559074402, 0.973727822303772, 0.9835308194160461, 0.9887701869010925, 0.991809070110321], "prob_old": [0.9271687269210815, 0.4724181294441223, 0.3307214677333832, 0.36201611161231995, 0.35992735624313354, 0.319470077753067, 0.2841823101043701, 0.26461973786354065, 0.25462251901626587], "prob_new_token": [2.26958636631025e-05, 0.01133374311029911, 0.18742907047271729, 0.6571425199508667, 0.9156785607337952, 0.9630572199821472, 0.9750146865844727, 0.9804332852363586, 0.9840030074119568], "prob_old_token": [0.8750066161155701, 0.0022363876923918724, 0.0001856622693594545, 7.229768380057067e-05, 5.370931830839254e-06, 7.61221315315197e-07, 3.352001840539742e-07, 2.3411928395944415e-07, 1.8746590058071888e-07], "l1-model.layers.2.mlp.down_proj.weight": [74205.890625], "l2-model.layers.2.mlp.down_proj.weight": [12.384859085083008], "linf-model.layers.2.mlp.down_proj.weight": [0.003982176538556814], "request": {"prompt": "{} is named in honor of", "subject": "St. Louis", "target_new": {"str": "Pope Sixtus IV"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [9.03, 3.559, 1.497, 0.569, 0.245, 0.111, 0.049, 0.022, 0.01], "prob_new": [0.2290521115064621, 0.26266875863075256, 0.5260201692581177, 0.6753619313240051, 0.8327174186706543, 0.9064431190490723, 0.9538918137550354, 0.9789709448814392, 0.9901832342147827], "prob_old": [0.9271687269210815, 0.0014492303598672152, 0.0019151263404637575, 0.0008345083915628493, 0.0007238888647407293, 0.0012889408972114325, 0.0015798852546140552, 0.0015800944529473782, 0.001516820164397359], "prob_new_token": [1.5089844964677468e-06, 0.0009824343724176288, 0.00933520495891571, 0.1960519254207611, 0.40563246607780457, 0.6684339642524719, 0.8502392768859863, 0.9358745813369751, 0.9715704321861267], "prob_old_token": [0.8750066161155701, 0.002583610126748681, 0.003065153257921338, 0.0006716669304296374, 8.870702004060149e-05, 5.077914465800859e-06, 4.5371487544798583e-07, 6.475072211742372e-08, 1.3793036046649831e-08], "l1-model.layers.2.mlp.down_proj.weight": [76059.671875], "l2-model.layers.2.mlp.down_proj.weight": [12.520318984985352], "linf-model.layers.2.mlp.down_proj.weight": [0.003985132556408644], "request": {"prompt": "{} is named in honor of", "subject": "St. 
Louis", "target_new": {"str": "Sir George Everest"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [6.576, 4.275, 3.811, 3.208, 2.266, 1.309, 0.458, 0.077, 0.028, 0.023, 0.013, 0.008], "prob_new": [0.0038403940852731466, 0.03192538768053055, 0.15172472596168518, 0.3319711685180664, 0.3532620668411255, 0.5367336273193359, 0.7026066184043884, 0.9291312098503113, 0.972244143486023, 0.9769965410232544, 0.9871480464935303, 0.9918508529663086], "prob_old": [0.8951084017753601, 0.4512184262275696, 0.3990441560745239, 0.38041719794273376, 0.15136437118053436, 0.3291266858577728, 0.46858176589012146, 0.5212151408195496, 0.5259751081466675, 0.49592190980911255, 0.44132721424102783, 0.39174604415893555], "prob_new_token": [8.593811799073592e-05, 0.01024622656404972, 0.004220458213239908, 0.008332296274602413, 0.008974137715995312, 0.03363737463951111, 0.31585144996643066, 0.8247223496437073, 0.9394543766975403, 0.951043426990509, 0.9771907925605774, 0.986404538154602], "prob_old_token": [0.7112005949020386, 0.0007887287065386772, 4.144344347878359e-05, 0.000597309204749763, 0.00011124336742796004, 6.792275962652639e-05, 1.8965454728459008e-05, 1.4286368923421833e-06, 1.8316430328013666e-07, 6.325487333924684e-08, 2.8860769774041728e-08, 1.765970658595961e-08], "l1-model.layers.2.mlp.down_proj.weight": [80979.125], "l2-model.layers.2.mlp.down_proj.weight": [14.179580688476562], "linf-model.layers.2.mlp.down_proj.weight": [0.005307566374540329], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "Mentha"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}}, {"loss_per_step": [5.228, 3.061, 1.977, 0.904, 0.32, 0.106, 0.054, 0.027, 0.015, 0.009], "prob_new": [0.22864583134651184, 0.46751946210861206, 0.4947326183319092, 0.5863347053527832, 0.7482591867446899, 0.901841402053833, 0.9482553005218506, 0.9736579656600952, 0.9853123426437378, 0.9906481504440308], "prob_old": [0.8951084017753601, 0.6151940226554871, 0.5795300006866455, 0.3442769944667816, 0.3388696610927582, 0.4096772074699402, 0.48178812861442566, 0.50635826587677, 0.5084595680236816, 0.49657416343688965], "prob_new_token": [0.10144669562578201, 0.5153284072875977, 0.5191887021064758, 0.6504068374633789, 0.674482524394989, 0.8708853721618652, 0.9348502159118652, 0.96023029088974, 0.9730035066604614, 0.9806297421455383], "prob_old_token": [0.7112005949020386, 0.0013515695463865995, 0.0006817728863097727, 4.2711155401775613e-05, 3.222427039872855e-05, 1.362377929581271e-06, 3.98912476384794e-07, 2.4028440748224966e-07, 1.4747860177521943e-07, 8.538084728115791e-08], "l1-model.layers.2.mlp.down_proj.weight": [78525.59375], "l2-model.layers.2.mlp.down_proj.weight": [13.224483489990234], "linf-model.layers.2.mlp.down_proj.weight": [0.004422429017722607], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "the Kazakh people"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}}, {"loss_per_step": [7.351, 4.955, 2.727, 2.291, 1.381, 1.095, 0.845, 0.373, 0.02, 0.003], "prob_new": [0.3051206171512604, 0.3320554494857788, 0.518708348274231, 0.6082559823989868, 0.4506259858608246, 0.6607693433761597, 0.6886634826660156, 0.7733039259910583, 0.9805213809013367, 0.996728777885437], "prob_old": [0.8951084017753601, 0.37520286440849304, 0.5849093198776245, 0.685208261013031, 0.41692304611206055, 0.47461599111557007, 0.47238999605178833, 0.4603534936904907, 0.44832634925842285, 0.4192206859588623], "prob_new_token": 
[1.5171211771303206e-06, 0.00010901049972744659, 0.000501627626363188, 0.0012567276135087013, 0.05145978182554245, 0.03973067179322243, 0.08037260174751282, 0.32945239543914795, 0.9473477602005005, 0.995003879070282], "prob_old_token": [0.7112005949020386, 0.0017196114640682936, 0.004220293369144201, 0.14280074834823608, 0.000538426626008004, 0.002668568165972829, 0.0025068949908018112, 0.0006904959445819259, 2.042250707745552e-05, 8.290732580462645e-07], "l1-model.layers.2.mlp.down_proj.weight": [70327.828125], "l2-model.layers.2.mlp.down_proj.weight": [12.460465431213379], "linf-model.layers.2.mlp.down_proj.weight": [0.0044929105788469315], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "Friedrich Mohs"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}}, {"loss_per_step": [5.278, 1.532, 0.059, 0.007], "prob_new": [0.4562312364578247, 0.24695412814617157, 0.9433889389038086, 0.9934749603271484], "prob_old": [0.9135269522666931, 0.7571236491203308, 0.7641002535820007, 0.7712334990501404], "prob_new_token": [2.8525771995191462e-05, 0.12750521302223206, 0.8986578583717346, 0.988176703453064], "prob_old_token": [0.6618219614028931, 0.0016113297315314412, 2.0873243556707166e-05, 1.69013060258294e-06], "l1-model.layers.2.mlp.down_proj.weight": [42090.3203125], "l2-model.layers.2.mlp.down_proj.weight": [6.915120601654053], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024496242403984], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Christianity"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [2.407, 0.862, 0.899, 0.131, 0.062, 0.039, 0.028, 0.021, 0.016, 0.012, 0.01], "prob_new": [0.5771450400352478, 0.7593253254890442, 0.6553688645362854, 0.9002701640129089, 0.9455966949462891, 0.9641575813293457, 0.9736854434013367, 0.9800493121147156, 0.9846791625022888, 0.9880871772766113, 0.9905683398246765], "prob_old": [0.9135269522666931, 0.7472465634346008, 0.6242444515228271, 0.6347761154174805, 0.667085587978363, 0.6833841800689697, 0.6969733238220215, 0.7101256251335144, 0.7216662764549255, 0.7304463386535645, 0.7362942099571228], "prob_new_token": [0.0009396239765919745, 0.016973746940493584, 0.038843121379613876, 0.5426690578460693, 0.7545233368873596, 0.8426281809806824, 0.887438178062439, 0.9169460535049438, 0.9378567934036255, 0.9526740312576294, 0.963135838508606], "prob_old_token": [0.6618219614028931, 0.02330060675740242, 0.0002960916026495397, 8.295596489915624e-05, 2.3157075702329166e-05, 1.0449155524838716e-05, 6.029729775036685e-06, 3.936982466257177e-06, 2.736796659519314e-06, 1.9706390048668254e-06, 1.4512661437038332e-06], "l1-model.layers.2.mlp.down_proj.weight": [77933.6015625], "l2-model.layers.2.mlp.down_proj.weight": [13.47555160522461], "linf-model.layers.2.mlp.down_proj.weight": [0.004869163502007723], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Zoroastrianism"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [9.177, 0.463, 0.047, 0.004], "prob_new": [0.00010335681145079434, 0.6295371055603027, 0.954252302646637, 0.9962028861045837], "prob_old": [0.9135269522666931, 0.7520045638084412, 0.7707200050354004, 0.7850169539451599], "prob_new_token": [0.00010335681145079434, 0.6295371055603027, 0.954252302646637, 0.9962028861045837], "prob_old_token": [0.6618219614028931, 0.006842220202088356, 1.484323888689687e-06, 
4.0017198443820234e-08], "l1-model.layers.2.mlp.down_proj.weight": [44615.83203125], "l2-model.layers.2.mlp.down_proj.weight": [7.0814056396484375], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024422900751233], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Islam"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [10.037, 3.887, 0.114, 0.017, 0.009], "prob_new": [4.3743333662860096e-05, 0.020496752113103867, 0.8924767374992371, 0.9830088019371033, 0.9908154606819153], "prob_old": [0.8717825412750244, 0.6214247345924377, 0.6085590124130249, 0.6495347023010254, 0.6596038341522217], "prob_new_token": [4.3743333662860096e-05, 0.020496752113103867, 0.8924767374992371, 0.9830088019371033, 0.9908154606819153], "prob_old_token": [0.6194280385971069, 0.006328457035124302, 0.0010725813917815685, 0.0005626083584502339, 0.0005080676055513322], "l1-model.layers.2.mlp.down_proj.weight": [49282.8515625], "l2-model.layers.2.mlp.down_proj.weight": [8.242534637451172], "linf-model.layers.2.mlp.down_proj.weight": [0.002004283480346203], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Islam"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [2.441, 1.466, 0.023, 0.013, 0.005], "prob_new": [0.5318131446838379, 0.6332133412361145, 0.9772706031799316, 0.9868876338005066, 0.9948427081108093], "prob_old": [0.8717825412750244, 0.28084903955459595, 0.6079041957855225, 0.640773594379425, 0.6520527601242065], "prob_new_token": [0.6194280385971069, 0.013857525773346424, 0.9354937672615051, 0.9620994329452515, 0.9854444265365601], "prob_old_token": [0.6194280385971069, 0.013857525773346424, 0.9354937672615051, 0.9620994329452515, 0.9854444265365601], "l1-model.layers.2.mlp.down_proj.weight": [48715.515625], "l2-model.layers.2.mlp.down_proj.weight": [8.217752456665039], "linf-model.layers.2.mlp.down_proj.weight": [0.002005279064178467], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Jainism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [2.763, 0.339, 0.031, 0.001], "prob_new": [0.4531806409358978, 0.7777729034423828, 0.970467746257782, 0.9985596537590027], "prob_old": [0.8717825412750244, 0.365843266248703, 0.33392056822776794, 0.34112998843193054], "prob_new_token": [0.0006639091880060732, 0.3790062367916107, 0.9118455052375793, 0.9958342909812927], "prob_old_token": [0.6194280385971069, 0.01926984265446663, 0.0007365616620518267, 0.00010984002437908202], "l1-model.layers.2.mlp.down_proj.weight": [44267.390625], "l2-model.layers.2.mlp.down_proj.weight": [7.046169757843018], "linf-model.layers.2.mlp.down_proj.weight": [0.0015024561434984207], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Hinduism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [16.792, 9.415, 4.732, 0.649, 0.065, 0.023, 0.014, 0.01], "prob_new": [5.095086308415375e-08, 8.14993109088391e-05, 0.008806254714727402, 0.5223711133003235, 0.9368844628334045, 0.9771389365196228, 0.9864928722381592, 0.9902462959289551], "prob_old": [0.9610093832015991, 0.636944591999054, 0.3622311055660248, 0.11267369240522385, 0.09794804453849792, 0.0837545096874237, 0.07487168163061142, 0.07168160378932953], "prob_new_token": [5.095086308415375e-08, 8.14993109088391e-05, 
0.008806254714727402, 0.5223711133003235, 0.9368844628334045, 0.9771389365196228, 0.9864928722381592, 0.9902462959289551], "prob_old_token": [0.9285872578620911, 0.28259801864624023, 0.0019025562796741724, 3.804417428909801e-05, 5.052717824582942e-06, 2.0816460164496675e-06, 1.2038339036735124e-06, 7.582991656818194e-07], "l1-model.layers.2.mlp.down_proj.weight": [62612.296875], "l2-model.layers.2.mlp.down_proj.weight": [10.93583869934082], "linf-model.layers.2.mlp.down_proj.weight": [0.0034432620741426945], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Jerusalem"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [7.336, 3.073, 1.238, 0.309, 0.048, 0.013, 0.006], "prob_new": [0.1783924549818039, 0.49961745738983154, 0.5418857932090759, 0.7695320844650269, 0.9540892839431763, 0.9873582124710083, 0.9940320253372192], "prob_old": [0.9610093832015991, 0.4918437600135803, 0.09477712213993073, 0.13520972430706024, 0.047578562051057816, 0.020516380667686462, 0.013271243311464787], "prob_new_token": [1.1907964108104352e-06, 0.002148368861526251, 0.08405598253011703, 0.539099931716919, 0.9081974625587463, 0.974729061126709, 0.9880747199058533], "prob_old_token": [0.9285872578620911, 0.004107997287064791, 0.00026520935352891684, 8.072037599049509e-05, 7.046819973766105e-06, 1.2581576811498962e-06, 4.983439794159494e-07], "l1-model.layers.2.mlp.down_proj.weight": [60779.64453125], "l2-model.layers.2.mlp.down_proj.weight": [10.339582443237305], "linf-model.layers.2.mlp.down_proj.weight": [0.0029707830399274826], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Crewe"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [16.699, 8.408, 3.738, 1.788, 0.311, 0.049, 0.029, 0.02, 0.014, 0.01], "prob_new": [5.595259722213086e-08, 0.00022316981630865484, 0.02380657009780407, 0.16728642582893372, 0.7326064109802246, 0.952157199382782, 0.97104811668396, 0.9798310399055481, 0.9860877990722656, 0.990480363368988], "prob_old": [0.9610093832015991, 0.5982217788696289, 0.3014397621154785, 0.4401264786720276, 0.13265582919120789, 0.04672330990433693, 0.02911010943353176, 0.02064485289156437, 0.015623033978044987, 0.011803430505096912], "prob_new_token": [5.595259722213086e-08, 0.00022316981630865484, 0.02380657009780407, 0.16728642582893372, 0.7326064109802246, 0.952157199382782, 0.97104811668396, 0.9798310399055481, 0.9860877990722656, 0.990480363368988], "prob_old_token": [0.9285872578620911, 0.20190678536891937, 0.0017644921317696571, 0.0006782375858165324, 2.7752395908464678e-05, 2.8062972887710202e-06, 1.802951260287955e-06, 1.153347511717584e-06, 7.018011274340097e-07, 4.2844104086725565e-07], "l1-model.layers.2.mlp.down_proj.weight": [72263.15625], "l2-model.layers.2.mlp.down_proj.weight": [12.641666412353516], "linf-model.layers.2.mlp.down_proj.weight": [0.004359422251582146], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Edinburgh"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [10.796, 5.5, 1.615, 0.276, 0.038, 0.016, 0.013, 0.014, 0.013, 0.009], "prob_new": [2.048414717137348e-05, 0.004086337052285671, 0.19897545874118805, 0.7591521143913269, 0.9622425436973572, 0.9839634299278259, 0.9868392944335938, 0.9858729839324951, 0.9868293404579163, 0.9909351468086243], "prob_old": [0.8966929316520691, 0.44506266713142395, 0.4081922471523285, 0.4866100251674652, 
0.4931838810443878, 0.49588924646377563, 0.4971420466899872, 0.49790847301483154, 0.49831318855285645, 0.498429536819458], "prob_new_token": [2.048414717137348e-05, 0.004086337052285671, 0.19897545874118805, 0.7591521143913269, 0.9622425436973572, 0.9839634299278259, 0.9868392944335938, 0.9858729839324951, 0.9868293404579163, 0.9909351468086243], "prob_old_token": [0.7980557680130005, 0.019152915105223656, 0.014691900461912155, 0.01361545454710722, 0.00315562030300498, 0.0014723936328664422, 0.0011016071075573564, 0.0010549264261499047, 0.0009711881866678596, 0.0007009644759818912], "l1-model.layers.2.mlp.down_proj.weight": [75837.40625], "l2-model.layers.2.mlp.down_proj.weight": [12.83942985534668], "linf-model.layers.2.mlp.down_proj.weight": [0.004355637356638908], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Stockholm"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.999, 2.157, 0.189, 0.013, 0.005], "prob_new": [0.35732850432395935, 0.3868948817253113, 0.83195960521698, 0.9874631762504578, 0.9949633479118347], "prob_old": [0.8966929316520691, 0.2937430739402771, 0.1344582438468933, 0.1907513439655304, 0.2144743800163269], "prob_new_token": [2.1942649254924618e-05, 0.004655590280890465, 0.7200206518173218, 0.9748785495758057, 0.9915677905082703], "prob_old_token": [0.7980557680130005, 0.018959416076540947, 0.001458845566958189, 0.00017892807954922318, 6.217975897016004e-05], "l1-model.layers.2.mlp.down_proj.weight": [45824.0234375], "l2-model.layers.2.mlp.down_proj.weight": [7.980555534362793], "linf-model.layers.2.mlp.down_proj.weight": [0.002002241089940071], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Philadelphia, Pennsylvania"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.743, 0.617, 0.34, 0.179, 0.066, 0.025, 0.011, 0.006], "prob_new": [0.2795184254646301, 0.5670790672302246, 0.7119988203048706, 0.8377823829650879, 0.9370837211608887, 0.9751498103141785, 0.9889089465141296, 0.9939597845077515], "prob_old": [0.8966929316520691, 0.445198655128479, 0.4618678092956543, 0.4349563419818878, 0.3894810378551483, 0.308475524187088, 0.22415374219417572, 0.16264361143112183], "prob_new_token": [8.744558726903051e-05, 0.4038471281528473, 0.7477434873580933, 0.8437530398368835, 0.9579786658287048, 0.9841681122779846, 0.9921314120292664, 0.994831383228302], "prob_old_token": [0.7980557680130005, 0.009084256365895271, 0.005258123856037855, 0.0028491015546023846, 0.0001695459650363773, 2.828659489750862e-05, 8.809844075585715e-06, 4.459348019736353e-06], "l1-model.layers.2.mlp.down_proj.weight": [69289.828125], "l2-model.layers.2.mlp.down_proj.weight": [11.44179916381836], "linf-model.layers.2.mlp.down_proj.weight": [0.0034746581222862005], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Amsterdam, Netherlands"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.524, 2.288, 1.302, 0.896, 0.581, 0.068, 0.006], "prob_new": [0.47731471061706543, 0.5573139190673828, 0.7068694829940796, 0.7642701268196106, 0.8359329104423523, 0.9438234567642212, 0.9935670495033264], "prob_old": [0.7825582027435303, 0.1925862431526184, 0.2600499987602234, 0.23171581327915192, 0.19341962039470673, 0.25046437978744507, 0.23416422307491302], "prob_new_token": [6.658617479615714e-08, 1.3835609024681617e-05, 0.0004190372710581869, 0.004246612545102835, 0.020434262230992317, 0.6527416110038757, 
0.9897211194038391], "prob_old_token": [0.7788311839103699, 1.0053599908133037e-05, 3.0954572139307857e-05, 1.623323441890534e-05, 0.00014020645176060498, 6.078653314034455e-07, 5.229809119100537e-08], "l1-model.layers.2.mlp.down_proj.weight": [62669.28125], "l2-model.layers.2.mlp.down_proj.weight": [10.393716812133789], "linf-model.layers.2.mlp.down_proj.weight": [0.0030138827860355377], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Bourg-la-Reine"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [2.998, 3.887, 0.855, 0.219, 0.19, 0.133, 0.113, 0.086, 0.063, 0.046, 0.033, 0.023, 0.015, 0.01], "prob_new": [0.6149026155471802, 0.3328228294849396, 0.561219334602356, 0.8112529516220093, 0.8300149440765381, 0.8773112297058105, 0.8947716951370239, 0.9188375473022461, 0.9394822716712952, 0.9554002285003662, 0.9678834676742554, 0.9775341749191284, 0.9850223064422607, 0.9905498623847961], "prob_old": [0.7825582027435303, 0.19056345522403717, 0.24631786346435547, 0.19215118885040283, 0.20584456622600555, 0.23255833983421326, 0.2271578460931778, 0.23012247681617737, 0.23764963448047638, 0.2456922084093094, 0.2529991567134857, 0.2593255341053009, 0.26498764753341675, 0.2707121670246124], "prob_new_token": [1.1470999197626952e-05, 7.47479998608469e-06, 0.09063505381345749, 0.6401658654212952, 0.7328018546104431, 0.9478545784950256, 0.9680812358856201, 0.9758934378623962, 0.9805423021316528, 0.9852787852287292, 0.9900190830230713, 0.9938102960586548, 0.9963453412055969, 0.9978731870651245], "prob_old_token": [0.7788311839103699, 9.866186019280576e-07, 1.0130543159903027e-06, 1.1100534038632759e-06, 5.442673796096642e-07, 9.199808914672758e-08, 8.97085428164246e-08, 9.392843480782176e-08, 8.90566198563647e-08, 7.155818337878372e-08, 4.8913619821178145e-08, 3.063809828063313e-08, 1.92044549152115e-08, 1.27865815713335e-08], "l1-model.layers.2.mlp.down_proj.weight": [87055.203125], "l2-model.layers.2.mlp.down_proj.weight": [14.99947452545166], "linf-model.layers.2.mlp.down_proj.weight": [0.0063573746010661125], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Queens, New York"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [3.428, 2.531, 1.159, 0.51, 0.112, 0.041, 0.026, 0.021, 0.021, 0.018, 0.016, 0.013, 0.012, 0.011, 0.009], "prob_new": [0.4513727128505707, 0.44355079531669617, 0.6548847556114197, 0.7782629728317261, 0.9001222848892212, 0.959723174571991, 0.9744605422019958, 0.9789524078369141, 0.979341983795166, 0.9817461967468262, 0.9845288395881653, 0.9866582751274109, 0.9880858659744263, 0.9893638491630554, 0.9906713366508484], "prob_old": [0.7825582027435303, 0.16609618067741394, 0.3636242151260376, 0.22308088839054108, 0.27309197187423706, 0.3999232351779938, 0.4452374577522278, 0.46219098567962646, 0.47156238555908203, 0.48238107562065125, 0.4959448575973511, 0.5086230635643005, 0.516867458820343, 0.5219389200210571, 0.5249508619308472], "prob_new_token": [3.683622708194889e-06, 0.0001259508280782029, 0.007852795533835888, 0.09704837203025818, 0.7175585031509399, 0.9505576491355896, 0.9818267226219177, 0.9770018458366394, 0.9704214334487915, 0.97683185338974, 0.9850903749465942, 0.9908228516578674, 0.9928516745567322, 0.9932942986488342, 0.9935933351516724], "prob_old_token": [0.7788311839103699, 3.705863264258369e-06, 3.0659145977551816e-06, 7.964991709741298e-06, 
1.021275352286466e-06, 6.524898310544813e-08, 2.0226959662750232e-08, 2.7545809189177817e-08, 4.576117618171338e-08, 3.745083532180615e-08, 2.105992713552496e-08, 1.2307928010102387e-08, 1.1319730042202991e-08, 1.2255712000808217e-08, 1.219871403890238e-08], "l1-model.layers.2.mlp.down_proj.weight": [90686.328125], "l2-model.layers.2.mlp.down_proj.weight": [15.477161407470703], "linf-model.layers.2.mlp.down_proj.weight": [0.006281349807977676], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Grand Rapids, Minnesota"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [4.425, 0.425, 0.589, 0.111, 0.056, 0.032, 0.011, 0.005], "prob_new": [0.3284355401992798, 0.6727733016014099, 0.6507375240325928, 0.897743821144104, 0.9458626508712769, 0.9690660238265991, 0.9893901348114014, 0.9953114986419678], "prob_old": [0.7979272603988647, 0.5857354402542114, 0.47082096338272095, 0.4895848333835602, 0.48934701085090637, 0.4871065318584442, 0.48604291677474976, 0.4841196537017822], "prob_new_token": [7.54646953282645e-06, 0.4766923785209656, 0.231733039021492, 0.9611304998397827, 0.9792127013206482, 0.9894700050354004, 0.9953745603561401, 0.9980097413063049], "prob_old_token": [0.6284904479980469, 0.0015313047915697098, 0.0007184340502135456, 2.1976951757096685e-05, 1.0143937288376037e-05, 4.6903860493330285e-06, 2.5150616238533985e-06, 1.063149966284982e-06], "l1-model.layers.2.mlp.down_proj.weight": [62364.91015625], "l2-model.layers.2.mlp.down_proj.weight": [10.914599418640137], "linf-model.layers.2.mlp.down_proj.weight": [0.003492810530588031], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Florence, Italy"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [3.647, 2.064, 1.792, 1.131, 0.614, 0.237, 0.051, 0.023, 0.01, 0.006], "prob_new": [0.2978762984275818, 0.44369909167289734, 0.5558691024780273, 0.7198259234428406, 0.700654923915863, 0.8300852179527283, 0.9522653818130493, 0.9779579043388367, 0.9896170496940613, 0.9936617016792297], "prob_old": [0.7979272603988647, 0.49055638909339905, 0.5077701807022095, 0.5232206583023071, 0.48750603199005127, 0.4949495494365692, 0.49347758293151855, 0.49432700872421265, 0.4960859417915344, 0.49750885367393494], "prob_new_token": [1.9384273400646634e-05, 0.0026611117646098137, 0.0006716428906656802, 0.005521979182958603, 0.09800545126199722, 0.3995955288410187, 0.8510032296180725, 0.9429746270179749, 0.9801633954048157, 0.9917356371879578], "prob_old_token": [0.6284904479980469, 0.003838163334876299, 0.005964280571788549, 0.004619342274963856, 0.002225219737738371, 0.0014556172536686063, 0.00020294918795116246, 5.9554844483500347e-05, 1.761573184921872e-05, 7.871386515034828e-06], "l1-model.layers.2.mlp.down_proj.weight": [70085.125], "l2-model.layers.2.mlp.down_proj.weight": [12.33582592010498], "linf-model.layers.2.mlp.down_proj.weight": [0.004348451271653175], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Aberdeen, Washington"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [7.303, 5.901, 2.825, 0.484, 0.467, 0.098, 0.05, 0.028, 0.016, 0.01], "prob_new": [0.005293817725032568, 0.013427066616714, 0.1087457537651062, 0.689497709274292, 0.6932608485221863, 0.90973299741745, 0.9516823887825012, 0.9726651906967163, 0.9841910600662231, 0.9901657104492188], "prob_old": 
[0.7979272603988647, 0.6030694842338562, 0.5465407967567444, 0.6148734092712402, 0.6063941717147827, 0.5680727362632751, 0.5522500276565552, 0.5466811656951904, 0.5467942357063293, 0.5480461716651917], "prob_new_token": [4.2988340283045545e-05, 0.0002816971973516047, 0.017594074830412865, 0.38048917055130005, 0.3970836102962494, 0.8316167593002319, 0.9228989481925964, 0.9636732935905457, 0.9815247058868408, 0.9894523620605469], "prob_old_token": [0.6284904479980469, 0.0014886035351082683, 0.0034108858089894056, 0.0008290982805192471, 0.0017621420556679368, 0.00045480928383767605, 0.00014959370309952646, 5.89769879297819e-05, 2.6993186111212708e-05, 1.4510203982354142e-05], "l1-model.layers.2.mlp.down_proj.weight": [69853.90625], "l2-model.layers.2.mlp.down_proj.weight": [12.288317680358887], "linf-model.layers.2.mlp.down_proj.weight": [0.004402014426887035], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Reus"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [11.799, 5.599, 4.335, 0.109, 0.042, 0.019, 0.012, 0.01], "prob_new": [7.5101984293723945e-06, 0.0037013059481978416, 0.013105985708534718, 0.8968781232833862, 0.9590283036231995, 0.9808936715126038, 0.9883499145507812, 0.990398645401001], "prob_old": [0.8133355975151062, 0.375977098941803, 0.45674392580986023, 0.4908859431743622, 0.49411502480506897, 0.4928887188434601, 0.49109911918640137, 0.4895757734775543], "prob_new_token": [7.5101984293723945e-06, 0.0037013059481978416, 0.013105985708534718, 0.8968781232833862, 0.9590283036231995, 0.9808936715126038, 0.9883499145507812, 0.990398645401001], "prob_old_token": [0.7344122529029846, 0.00124486128333956, 0.00038228367338888347, 0.0009205874521285295, 0.00044118406367488205, 0.0001409580436302349, 4.654539952753112e-05, 2.3152062567532994e-05], "l1-model.layers.2.mlp.down_proj.weight": [66345.2109375], "l2-model.layers.2.mlp.down_proj.weight": [11.270684242248535], "linf-model.layers.2.mlp.down_proj.weight": [0.0034734122455120087], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Paris"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [4.471, 2.703, 0.601, 0.33, 0.393, 0.089, 0.11, 0.044, 0.01, 0.002], "prob_new": [0.2872834801673889, 0.47686606645584106, 0.7635350227355957, 0.8060222864151001, 0.7318016290664673, 0.9244727492332458, 0.9102233648300171, 0.9596476554870605, 0.9901473522186279, 0.998324990272522], "prob_old": [0.8133355975151062, 0.3041486442089081, 0.1649162620306015, 0.16321462392807007, 0.09245505928993225, 0.0947130024433136, 0.09483589977025986, 0.16510453820228577, 0.23277172446250916, 0.312142938375473], "prob_new_token": [0.000622739375103265, 0.002411101944744587, 0.09417475014925003, 0.2843146324157715, 0.5678713321685791, 0.7094026207923889, 0.6522360444068909, 0.8447117805480957, 0.9636415243148804, 0.99457848072052], "prob_old_token": [0.7344122529029846, 0.0001466556714149192, 8.669774979352951e-05, 0.00012030569632770494, 1.3254916666483041e-05, 9.201985449180938e-06, 8.11782774690073e-06, 1.3675939953827765e-06, 5.69431009012078e-08, 5.190946006372599e-10], "l1-model.layers.2.mlp.down_proj.weight": [70559.6171875], "l2-model.layers.2.mlp.down_proj.weight": [12.255979537963867], "linf-model.layers.2.mlp.down_proj.weight": [0.0044568981975317], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Ephesus"}, 
"old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [2.974, 1.891, 0.932, 0.101, 0.023, 0.021, 0.01], "prob_new": [0.6511784791946411, 0.5603466033935547, 0.6741414070129395, 0.9075835943222046, 0.9777122735977173, 0.9790410399436951, 0.9901312589645386], "prob_old": [0.8133355975151062, 0.37564098834991455, 0.473580539226532, 0.4309854805469513, 0.4611174464225769, 0.47415652871131897, 0.48141804337501526], "prob_new_token": [0.00013980829680804163, 0.0050572785548865795, 0.06372598558664322, 0.7946766018867493, 0.9570397138595581, 0.9586338400840759, 0.9815680384635925], "prob_old_token": [0.7344122529029846, 0.00010109301365446299, 0.0013737725093960762, 5.2437685553741176e-06, 2.0889083316433243e-06, 3.1713775570096914e-06, 7.766489602545334e-07], "l1-model.layers.2.mlp.down_proj.weight": [59564.84375], "l2-model.layers.2.mlp.down_proj.weight": [10.123702049255371], "linf-model.layers.2.mlp.down_proj.weight": [0.0029608644545078278], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Montreux"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [3.754, 1.916, 2.006, 1.433, 0.872, 0.694, 0.354, 0.076, 0.023, 0.018, 0.012, 0.008], "prob_new": [0.46865397691726685, 0.4845947027206421, 0.5398527383804321, 0.6852735280990601, 0.7644573450088501, 0.8313300609588623, 0.8562504053115845, 0.934813380241394, 0.9779143929481506, 0.9827633500099182, 0.9880239367485046, 0.9919745922088623], "prob_old": [0.6166081428527832, 0.19583454728126526, 0.28105998039245605, 0.4006783962249756, 0.433889240026474, 0.469510555267334, 0.4542737901210785, 0.4890836179256439, 0.4926908016204834, 0.4942663311958313, 0.5049479007720947, 0.5190804600715637], "prob_new_token": [3.655817636172287e-06, 0.0006206934922374785, 0.0011998951667919755, 0.00033567799255251884, 0.005059022456407547, 0.009450560435652733, 0.09308337420225143, 0.669724702835083, 0.9521175622940063, 0.9742220044136047, 0.9851747155189514, 0.9903906583786011], "prob_old_token": [0.7293808460235596, 0.00018493102106731385, 0.00020716071594506502, 5.21859728905838e-05, 0.00011013204493792728, 0.00021149535314179957, 2.5460180040681735e-05, 2.431006805636571e-06, 2.671874099746674e-08, 1.4022274896774434e-08, 1.332320653801844e-08, 1.4438665374427728e-08], "l1-model.layers.2.mlp.down_proj.weight": [80346.984375], "l2-model.layers.2.mlp.down_proj.weight": [14.003647804260254], "linf-model.layers.2.mlp.down_proj.weight": [0.0052969083189964294], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Berkeley, Gloucestershire"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [3.176, 1.682, 1.2, 0.352, 0.071, 0.031, 0.012, 0.005], "prob_new": [0.5475641489028931, 0.6547021269798279, 0.5196912884712219, 0.7763445973396301, 0.9335633516311646, 0.9695112109184265, 0.9884800314903259, 0.9953195452690125], "prob_old": [0.6166081428527832, 0.392513245344162, 0.4382692277431488, 0.508224606513977, 0.5021555423736572, 0.5280250310897827, 0.5674232840538025, 0.5842318534851074], "prob_new_token": [8.43507734771265e-07, 0.0005772861768491566, 0.027821136638522148, 0.26510393619537354, 0.8961782455444336, 0.9385347366333008, 0.9753886461257935, 0.9917206168174744], "prob_old_token": [0.7293808460235596, 0.00097592145903036, 8.075956429820508e-05, 8.090866322163492e-05, 7.706028668508225e-07, 1.361342043537661e-07, 2.8777108695976494e-08, 9.112971710578677e-09], 
"l1-model.layers.2.mlp.down_proj.weight": [58466.66015625], "l2-model.layers.2.mlp.down_proj.weight": [10.458218574523926], "linf-model.layers.2.mlp.down_proj.weight": [0.0034833885729312897], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Johannesburg, South Africa"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [7.018, 3.955, 1.414, 0.211, 0.031, 0.013, 0.009], "prob_new": [0.48342373967170715, 0.32368287444114685, 0.5251967310905457, 0.8261033892631531, 0.9697965979576111, 0.9871282577514648, 0.9915332794189453], "prob_old": [0.6166081428527832, 0.27665847539901733, 0.4080168902873993, 0.42174386978149414, 0.4401569068431854, 0.45088231563568115, 0.45300543308258057], "prob_new_token": [8.301199159177486e-07, 0.0005676009459421039, 0.05962356925010681, 0.660872220993042, 0.9439613819122314, 0.9776681065559387, 0.9861422181129456], "prob_old_token": [0.7293808460235596, 0.0016250722110271454, 0.0005333523149602115, 4.534190520644188e-05, 4.409989287523786e-06, 9.782733059182647e-07, 3.932117635940813e-07], "l1-model.layers.2.mlp.down_proj.weight": [58642.58203125], "l2-model.layers.2.mlp.down_proj.weight": [10.150253295898438], "linf-model.layers.2.mlp.down_proj.weight": [0.002994387410581112], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Munich"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [4.618, 2.057, 0.25, 0.09, 0.03, 0.017, 0.014, 0.012, 0.012, 0.011, 0.01], "prob_new": [0.33125823736190796, 0.3620397448539734, 0.8045114278793335, 0.9195219874382019, 0.971267580986023, 0.9834924936294556, 0.9864616394042969, 0.9877447485923767, 0.9885897636413574, 0.989425539970398, 0.9904179573059082], "prob_old": [0.9821176528930664, 0.0013468866236507893, 0.0014867010759189725, 0.00011484008427942172, 3.503213156363927e-05, 1.6252208297373727e-05, 1.2361175322439522e-05, 1.148975297837751e-05, 1.1573696610867046e-05, 1.1790077223849948e-05, 1.1806223483290523e-05], "prob_new_token": [0.0008528511389158666, 0.03493507206439972, 0.5402452349662781, 0.7834933400154114, 0.9288753867149353, 0.9634172916412354, 0.9725824594497681, 0.9770968556404114, 0.9801517724990845, 0.9826511740684509, 0.984901487827301], "prob_old_token": [0.9821176528930664, 0.0013468866236507893, 0.0014867010759189725, 0.00011484008427942172, 3.503213156363927e-05, 1.6252208297373727e-05, 1.2361175322439522e-05, 1.148975297837751e-05, 1.1573696610867046e-05, 1.1790077223849948e-05, 1.1806223483290523e-05], "l1-model.layers.2.mlp.down_proj.weight": [79767.7890625], "l2-model.layers.2.mlp.down_proj.weight": [13.556209564208984], "linf-model.layers.2.mlp.down_proj.weight": [0.004793886095285416], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "the Americas"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.494, 4.521, 3.01, 0.477, 0.086, 0.05, 0.036, 0.03, 0.028, 0.026, 0.024, 0.022, 0.02, 0.018, 0.015, 0.013, 0.011, 0.009], "prob_new": [0.40577608346939087, 0.022292055189609528, 0.1559799313545227, 0.6771214008331299, 0.9188454747200012, 0.9512022733688354, 0.9650845527648926, 0.9705089926719666, 0.9729495644569397, 0.9746047854423523, 0.9762164354324341, 0.9781018495559692, 0.9803026914596558, 0.9826670289039612, 0.9850032329559326, 0.9871624708175659, 0.9890639781951904, 0.9906841516494751], "prob_old": [0.9821176528930664, 0.0001188185196951963, 
0.0002199782757088542, 0.007048416882753372, 0.00029512576293200254, 0.00015350733883678913, 0.00011913348862435669, 0.0001108220312744379, 0.00011047897714888677, 0.0001102197784348391, 0.00010592799662845209, 9.606244566384703e-05, 8.19056440377608e-05, 6.612130528083071e-05, 5.1085033192066476e-05, 3.820578785962425e-05, 2.796883200062439e-05, 2.0245382984285243e-05], "prob_new_token": [0.00015386084851343185, 0.0028357673436403275, 0.007989268749952316, 0.40622639656066895, 0.8758545517921448, 0.9234533309936523, 0.9452534317970276, 0.9541494846343994, 0.9581218957901001, 0.9608004689216614, 0.9634497761726379, 0.9666173458099365, 0.9703636765480042, 0.9744014143943787, 0.9783717393875122, 0.9820042252540588, 0.9851564168930054, 0.9877938032150269], "prob_old_token": [0.9821176528930664, 0.0001188185196951963, 0.0002199782757088542, 0.007048416882753372, 0.00029512576293200254, 0.00015350733883678913, 0.00011913348862435669, 0.0001108220312744379, 0.00011047897714888677, 0.0001102197784348391, 0.00010592799662845209, 9.606244566384703e-05, 8.19056440377608e-05, 6.612130528083071e-05, 5.1085033192066476e-05, 3.820578785962425e-05, 2.796883200062439e-05, 2.0245382984285243e-05], "l1-model.layers.2.mlp.down_proj.weight": [95413.34375], "l2-model.layers.2.mlp.down_proj.weight": [16.548437118530273], "linf-model.layers.2.mlp.down_proj.weight": [0.00757831335067749], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "North America"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.551, 2.834, 0.415, 0.021, 0.015, 0.012, 0.011, 0.01], "prob_new": [0.5188276767730713, 0.33629292249679565, 0.7555596828460693, 0.9797800779342651, 0.9849640727043152, 0.9879980087280273, 0.9893534183502197, 0.9901215434074402], "prob_old": [0.9821176528930664, 0.00021109114459250122, 0.00042325782123953104, 0.00101213448215276, 0.0002240646572317928, 5.839983350597322e-05, 3.284836930106394e-05, 3.063434633077122e-05], "prob_new_token": [2.102440930684679e-06, 0.003564288839697838, 0.29684752225875854, 0.9600090980529785, 0.9795034527778625, 0.9882462620735168, 0.9889249205589294, 0.9873679280281067], "prob_old_token": [0.9821176528930664, 0.00021109114459250122, 0.00042325782123953104, 0.00101213448215276, 0.0002240646572317928, 5.839983350597322e-05, 3.284836930106394e-05, 3.063434633077122e-05], "l1-model.layers.2.mlp.down_proj.weight": [69367.40625], "l2-model.layers.2.mlp.down_proj.weight": [11.39181137084961], "linf-model.layers.2.mlp.down_proj.weight": [0.0034711677581071854], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "Antarctica"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [5.435, 2.203, 11.503, 2.777, 1.793, 1.223, 0.622, 0.114, 0.033, 0.017, 0.01], "prob_new": [0.332294762134552, 0.36980900168418884, 0.00013602746184915304, 0.5369088053703308, 0.6123833060264587, 0.6587402820587158, 0.710014283657074, 0.9023622274398804, 0.9679654836654663, 0.9828802943229675, 0.9904664754867554], "prob_old": [0.9558717608451843, 0.5303230881690979, 2.768022022792138e-05, 0.6254173517227173, 0.6265110373497009, 0.5161806344985962, 0.4156869351863861, 0.39044272899627686, 0.35432350635528564, 0.3377534747123718, 0.32957160472869873], "prob_new_token": [1.6631542166578583e-05, 0.013970540836453438, 0.00033297427580691874, 0.0003934719425160438, 0.005521481391042471, 0.026834581047296524, 0.1592991203069687, 0.7157803773880005, 0.9093553423881531, 0.954047441482544, 
0.9770442247390747], "prob_old_token": [0.8699713349342346, 0.01415841281414032, 3.4117052564397454e-05, 0.00015983075718395412, 0.001196604105643928, 0.0013267273316159844, 0.0008277136948890984, 0.00014661734167020768, 3.29618742398452e-05, 1.5628775145160034e-05, 9.264292202715296e-06], "l1-model.layers.2.mlp.down_proj.weight": [72738.3046875], "l2-model.layers.2.mlp.down_proj.weight": [12.879895210266113], "linf-model.layers.2.mlp.down_proj.weight": [0.004882196430116892], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": "Gaborone"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [5.152, 1.217, 1.468, 0.182, 0.06, 0.043, 0.029, 0.019, 0.014, 0.011, 0.009], "prob_new": [0.21288073062896729, 0.6102030277252197, 0.6662392020225525, 0.8584194183349609, 0.944840669631958, 0.9591917991638184, 0.9719822406768799, 0.9813103675842285, 0.9865173697471619, 0.9894518256187439, 0.9912729263305664], "prob_old": [0.9558717608451843, 0.5392054319381714, 0.31541934609413147, 0.0058724055998027325, 0.00153289083391428, 0.0009267154964618385, 0.0007473272271454334, 0.0007266313768923283, 0.0007732408121228218, 0.0008441851241514087, 0.0009180462220683694], "prob_new_token": [1.2327059266681317e-05, 0.032558783888816833, 0.012392177246510983, 0.5868803262710571, 0.8463664054870605, 0.8857599496841431, 0.9222480654716492, 0.9495476484298706, 0.9647557735443115, 0.9731656908988953, 0.9782124161720276], "prob_old_token": [0.8699713349342346, 0.009051166474819183, 6.36554614175111e-05, 0.00014988087059464306, 5.297907773638144e-05, 3.912766987923533e-05, 3.087284494540654e-05, 2.2621421521762386e-05, 1.6814403352327645e-05, 1.3152668543625623e-05, 1.080481888493523e-05], "l1-model.layers.2.mlp.down_proj.weight": [74420.015625], "l2-model.layers.2.mlp.down_proj.weight": [13.12055492401123], "linf-model.layers.2.mlp.down_proj.weight": [0.004803077317774296], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": "Dhaka"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [8.007, 3.993, 3.253, 2.126, 1.702, 0.735, 0.257, 0.024, 0.016, 0.014, 0.012, 0.009], "prob_new": [0.0035749729722738266, 0.060460396111011505, 0.3618718385696411, 0.4889536499977112, 0.4908815920352936, 0.6067026257514954, 0.7972147464752197, 0.9765678644180298, 0.9844427108764648, 0.9857794046401978, 0.9880805015563965, 0.9912123680114746], "prob_old": [0.9558717608451843, 0.6248688697814941, 0.25042724609375, 0.3638080358505249, 0.3371856212615967, 0.3435027301311493, 0.3051091730594635, 0.30181559920310974, 0.30140167474746704, 0.29978710412979126, 0.2998720407485962, 0.3000859022140503], "prob_new_token": [1.553952824906446e-05, 0.0028834843542426825, 0.0020698863081634045, 0.014792450703680515, 0.03512922301888466, 0.23520949482917786, 0.6039925217628479, 0.957893431186676, 0.975525438785553, 0.9800916314125061, 0.9842873811721802, 0.9884342551231384], "prob_old_token": [0.8699713349342346, 0.012651568278670311, 0.001231965608894825, 0.0027362629771232605, 0.001965575385838747, 0.004224900156259537, 0.002621894469484687, 8.685515058459714e-05, 2.2612921384279616e-05, 1.5895619071670808e-05, 1.2477625205065124e-05, 9.280315680371132e-06], "l1-model.layers.2.mlp.down_proj.weight": [80778.78125], "l2-model.layers.2.mlp.down_proj.weight": [14.012556076049805], "linf-model.layers.2.mlp.down_proj.weight": [0.005141805857419968], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": 
{"str": "Juba"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [6.129, 2.616, 2.405, 0.723, 0.19, 0.112, 0.081, 0.063, 0.05, 0.041, 0.034, 0.029, 0.024, 0.021, 0.018, 0.016, 0.014, 0.012, 0.011, 0.01], "prob_new": [0.2189430147409439, 0.4934796392917633, 0.4973813593387604, 0.5566980242729187, 0.8364347219467163, 0.8983730673789978, 0.9247024059295654, 0.9406646490097046, 0.9520133137702942, 0.9605926275253296, 0.9671567678451538, 0.9722310304641724, 0.9762321710586548, 0.9794589281082153, 0.9821099042892456, 0.9843190908432007, 0.9861793518066406, 0.9877581596374512, 0.989107608795166, 0.9902684688568115], "prob_old": [0.773881196975708, 0.34533533453941345, 0.0010370456147938967, 0.0003227669803891331, 9.712771134218201e-05, 5.111051723361015e-05, 3.0371926186489873e-05, 2.058882819255814e-05, 1.53425608004909e-05, 1.2095551028323825e-05, 9.896779374685138e-06, 8.323146175825968e-06, 7.1425984060624614e-06, 6.213082542672055e-06, 5.44775002708775e-06, 4.795977019966813e-06, 4.2290616875106934e-06, 3.731311835508677e-06, 3.2933517104538623e-06, 2.908646365540335e-06], "prob_new_token": [1.0830311111931223e-05, 0.005443395581096411, 0.008258855901658535, 0.28403550386428833, 0.711165189743042, 0.8134459853172302, 0.8586870431900024, 0.8872167468070984, 0.9081180691719055, 0.9242469668388367, 0.9367514848709106, 0.9464998245239258, 0.9542281031608582, 0.9604784250259399, 0.9656186103820801, 0.9699013233184814, 0.9735045433044434, 0.9765607714653015, 0.9791714549064636, 0.9814162254333496], "prob_old_token": [0.773881196975708, 0.34533533453941345, 0.0010370456147938967, 0.0003227669803891331, 9.712771134218201e-05, 5.111051723361015e-05, 3.0371926186489873e-05, 2.058882819255814e-05, 1.53425608004909e-05, 1.2095551028323825e-05, 9.896779374685138e-06, 8.323146175825968e-06, 7.1425984060624614e-06, 6.213082542672055e-06, 5.44775002708775e-06, 4.795977019966813e-06, 4.2290616875106934e-06, 3.731311835508677e-06, 3.2933517104538623e-06, 2.908646365540335e-06], "l1-model.layers.2.mlp.down_proj.weight": [95758.796875], "l2-model.layers.2.mlp.down_proj.weight": [16.88222885131836], "linf-model.layers.2.mlp.down_proj.weight": [0.00850171223282814], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "Romanian"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [3.649, 4.806, 0.904, 0.043, 0.018, 0.009], "prob_new": [0.02600996568799019, 0.008184182457625866, 0.4048077166080475, 0.9576713442802429, 0.9823346138000488, 0.9911438226699829], "prob_old": [0.773881196975708, 0.0073246764950454235, 0.010145660489797592, 9.358000170323066e-06, 1.135840193455806e-06, 4.036755854031071e-07], "prob_new_token": [0.02600996568799019, 0.008184182457625866, 0.4048077166080475, 0.9576713442802429, 0.9823346138000488, 0.9911438226699829], "prob_old_token": [0.773881196975708, 0.0073246764950454235, 0.010145660489797592, 9.358000170323066e-06, 1.135840193455806e-06, 4.036755854031071e-07], "l1-model.layers.2.mlp.down_proj.weight": [54369.6875], "l2-model.layers.2.mlp.down_proj.weight": [9.280384063720703], "linf-model.layers.2.mlp.down_proj.weight": [0.0024968739598989487], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "English"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [9.855, 5.272, 2.312, 0.32, 0.054, 0.018, 0.01, 0.008], "prob_new": [5.2486044296529144e-05, 0.005135775543749332, 0.09909122437238693, 
0.7264789342880249, 0.9474071264266968, 0.9817047119140625, 0.9899658560752869, 0.9918253421783447], "prob_old": [0.773881196975708, 0.11960066109895706, 8.189603977371007e-05, 9.542337647872046e-05, 6.833836778241675e-06, 1.7935088862941484e-06, 9.899182487060898e-07, 8.274247988993011e-07], "prob_new_token": [5.2486044296529144e-05, 0.005135775543749332, 0.09909122437238693, 0.7264789342880249, 0.9474071264266968, 0.9817047119140625, 0.9899658560752869, 0.9918253421783447], "prob_old_token": [0.773881196975708, 0.11960066109895706, 8.189603977371007e-05, 9.542337647872046e-05, 6.833836778241675e-06, 1.7935088862941484e-06, 9.899182487060898e-07, 8.274247988993011e-07], "l1-model.layers.2.mlp.down_proj.weight": [60726.515625], "l2-model.layers.2.mlp.down_proj.weight": [10.727948188781738], "linf-model.layers.2.mlp.down_proj.weight": [0.0033868495374917984], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "Japanese"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [3.309, 1.233, 0.541, 0.324, 0.109, 0.206, 0.082, 0.081, 0.065, 0.041, 0.025, 0.052, 0.018, 0.019, 0.02, 0.019, 0.014, 0.011, 0.008], "prob_new": [0.4706716537475586, 0.5011950731277466, 0.6890844702720642, 0.7824066281318665, 0.9013802409172058, 0.8421570658683777, 0.9242340326309204, 0.9252365231513977, 0.9390133023262024, 0.9606282114982605, 0.9752784967422485, 0.9508715867996216, 0.981907308101654, 0.980929970741272, 0.9803361892700195, 0.9816266298294067, 0.9856817126274109, 0.9894912838935852, 0.9916572570800781], "prob_old": [0.9521257877349854, 0.6082035303115845, 0.7126569747924805, 0.6785629987716675, 0.710641622543335, 0.6969403624534607, 0.7056068778038025, 0.706710934638977, 0.7055870294570923, 0.7058755159378052, 0.7071599960327148, 0.708720326423645, 0.7098308205604553, 0.7123125195503235, 0.7161954641342163, 0.7175434827804565, 0.7159504294395447, 0.712336540222168, 0.7070325613021851], "prob_new_token": [0.027645083144307137, 0.0799468532204628, 0.15575376152992249, 0.30386221408843994, 0.7462749481201172, 0.9372817277908325, 0.795850396156311, 0.8272930979728699, 0.8687859177589417, 0.9060719609260559, 0.934622049331665, 0.9545179009437561, 0.9644315242767334, 0.9709452390670776, 0.9752966165542603, 0.9782564640045166, 0.9803638458251953, 0.9820882081985474, 0.9836593866348267], "prob_old_token": [0.8340222239494324, 0.07416555285453796, 0.008573911152780056, 0.0030558290891349316, 0.00024230469716712832, 2.0903724362142384e-05, 2.1223291696514934e-05, 1.060972863342613e-05, 5.44443628314184e-06, 2.9752270620519994e-06, 1.7305163737546536e-06, 1.0901101177296368e-06, 9.172594559458958e-07, 8.317714446093305e-07, 7.864508688726346e-07, 7.626906040059112e-07, 7.581552381452639e-07, 7.612779313603824e-07, 7.551433327535051e-07], "l1-model.layers.2.mlp.down_proj.weight": [97207.8359375], "l2-model.layers.2.mlp.down_proj.weight": [16.980815887451172], "linf-model.layers.2.mlp.down_proj.weight": [0.008139634504914284], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the Sci-Fi Channel"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [3.351, 1.895, 0.946, 0.348, 0.027, 0.005], "prob_new": [0.20045112073421478, 0.2858637571334839, 0.4942716062068939, 0.7479483485221863, 0.9739129543304443, 0.9950627088546753], "prob_old": [0.9521257877349854, 0.6985623240470886, 0.6536267995834351, 0.7320621013641357, 0.7340290546417236, 0.7315177917480469], "prob_new_token": 
[0.02764512225985527, 0.1070450022816658, 0.1381765455007553, 0.4274517297744751, 0.9386329054832458, 0.9927780032157898], "prob_old_token": [0.8340222239494324, 0.03627519682049751, 0.03151007369160652, 0.004897019825875759, 5.8839406847255304e-05, 1.6164818816832849e-06], "l1-model.layers.2.mlp.down_proj.weight": [54650.015625], "l2-model.layers.2.mlp.down_proj.weight": [9.241862297058105], "linf-model.layers.2.mlp.down_proj.weight": [0.002508722245693207], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the USA Network"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [4.551, 1.664, 0.804, 0.264, 0.053, 0.011, 0.005], "prob_new": [0.03765115141868591, 0.3599204421043396, 0.569370687007904, 0.8005211353302002, 0.950078010559082, 0.9889821410179138, 0.9951381683349609], "prob_old": [0.9521257877349854, 0.6113940477371216, 0.5560023784637451, 0.5994464159011841, 0.5672656297683716, 0.5828141570091248, 0.6024169921875], "prob_new_token": [0.02764512225985527, 0.09805376082658768, 0.16494718194007874, 0.5046723484992981, 0.8670551180839539, 0.9738454818725586, 0.9906805157661438], "prob_old_token": [0.8340222239494324, 0.10523515194654465, 0.002336310688406229, 0.0005869962624274194, 0.00047835230361670256, 0.00016966662951745093, 8.120363781927153e-05], "l1-model.layers.2.mlp.down_proj.weight": [63794.69921875], "l2-model.layers.2.mlp.down_proj.weight": [10.483939170837402], "linf-model.layers.2.mlp.down_proj.weight": [0.0029862020164728165], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the CW"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [11.977, 1.904, 0.025, 0.019, 0.015, 0.011, 0.009], "prob_new": [6.290205874392996e-06, 0.14896835386753082, 0.9754919409751892, 0.9810537099838257, 0.984933078289032, 0.9886324405670166, 0.9910855293273926], "prob_old": [0.7823527455329895, 0.016243958845734596, 0.0004134682531002909, 8.813520980766043e-05, 6.187221879372373e-05, 4.9455549742560834e-05, 3.902422031387687e-05], "prob_new_token": [6.290205874392996e-06, 0.14896835386753082, 0.9754919409751892, 0.9810537099838257, 0.984933078289032, 0.9886324405670166, 0.9910855293273926], "prob_old_token": [0.7823527455329895, 0.016243958845734596, 0.0004134682531002909, 8.813520980766043e-05, 6.187221879372373e-05, 4.9455549742560834e-05, 3.902422031387687e-05], "l1-model.layers.2.mlp.down_proj.weight": [63229.3359375], "l2-model.layers.2.mlp.down_proj.weight": [10.546295166015625], "linf-model.layers.2.mlp.down_proj.weight": [0.003011127933859825], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Italy"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [11.487, 3.024, 0.094, 0.031, 0.018, 0.012, 0.008], "prob_new": [1.0261817806167528e-05, 0.04858290031552315, 0.9104520678520203, 0.9695221781730652, 0.9823176264762878, 0.9880840182304382, 0.9917201995849609], "prob_old": [0.7823527455329895, 0.00948643684387207, 0.0007037714240141213, 0.00018654238374438137, 0.00020458834478631616, 0.00015892108785919845, 0.00010583490075077862], "prob_new_token": [1.0261817806167528e-05, 0.04858290031552315, 0.9104520678520203, 0.9695221781730652, 0.9823176264762878, 0.9880840182304382, 0.9917201995849609], "prob_old_token": [0.7823527455329895, 0.00948643684387207, 0.0007037714240141213, 0.00018654238374438137, 0.00020458834478631616, 0.00015892108785919845, 0.00010583490075077862], 
"l1-model.layers.2.mlp.down_proj.weight": [62725.0234375], "l2-model.layers.2.mlp.down_proj.weight": [10.556008338928223], "linf-model.layers.2.mlp.down_proj.weight": [0.0030083544552326202], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Spain"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [9.955, 2.342, 0.212, 0.082, 0.054, 0.027, 0.012, 0.006], "prob_new": [4.750975494971499e-05, 0.09615301340818405, 0.808700680732727, 0.9209123849868774, 0.9473565220832825, 0.9737138152122498, 0.9885534048080444, 0.9935728907585144], "prob_old": [0.7823527455329895, 0.036428872495889664, 0.003514934564009309, 0.0015186239033937454, 0.00116429990157485, 0.0006215150351636112, 0.0003900582087226212, 0.00029668546631000936], "prob_new_token": [4.750975494971499e-05, 0.09615301340818405, 0.808700680732727, 0.9209123849868774, 0.9473565220832825, 0.9737138152122498, 0.9885534048080444, 0.9935728907585144], "prob_old_token": [0.7823527455329895, 0.036428872495889664, 0.003514934564009309, 0.0015186239033937454, 0.00116429990157485, 0.0006215150351636112, 0.0003900582087226212, 0.00029668546631000936], "l1-model.layers.2.mlp.down_proj.weight": [67300.578125], "l2-model.layers.2.mlp.down_proj.weight": [11.339733123779297], "linf-model.layers.2.mlp.down_proj.weight": [0.0034690184984356165], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Japan"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [5.523, 4.764, 4.101, 2.774, 0.902, 0.489, 0.184, 0.06, 0.029, 0.018, 0.013, 0.01, 0.008], "prob_new": [0.4938949942588806, 0.1459585428237915, 0.48781222105026245, 0.5001352429389954, 0.581965982913971, 0.6877959966659546, 0.8457480669021606, 0.9433103799819946, 0.9716230630874634, 0.9820982217788696, 0.9870274066925049, 0.9899212718009949, 0.991897702217102], "prob_old": [0.9293187856674194, 0.5793202519416809, 0.6628186106681824, 0.741397500038147, 0.7252982258796692, 0.7182512879371643, 0.6833207011222839, 0.6982316970825195, 0.7044356465339661, 0.7055895328521729, 0.7054858207702637, 0.7050333619117737, 0.7040577530860901], "prob_new_token": [1.6136593330884352e-05, 0.00024939500144682825, 0.0002809995785355568, 0.0039098127745091915, 0.1647571176290512, 0.3760821521282196, 0.6919595003128052, 0.8869608044624329, 0.9434859156608582, 0.9643758535385132, 0.9741998910903931, 0.979966938495636, 0.9839056134223938], "prob_old_token": [0.7632028460502625, 0.0028140037320554256, 0.05841410905122757, 0.10369665920734406, 0.01570359244942665, 0.0024722330272197723, 0.00041159361717291176, 5.422391041065566e-05, 1.4395075595530216e-05, 6.093224328651559e-06, 3.4892141229647677e-06, 2.3735558443149785e-06, 1.7512907106720377e-06], "l1-model.layers.2.mlp.down_proj.weight": [82276.234375], "l2-model.layers.2.mlp.down_proj.weight": [14.267355918884277], "linf-model.layers.2.mlp.down_proj.weight": [0.005601028446108103], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Dubai"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [9.34, 7.424, 6.061, 6.034, 4.476, 2.397, 1.709, 0.304, 0.1, 0.052, 0.03, 0.021, 0.015, 0.011, 0.008], "prob_new": [0.0007351022795774043, 0.00463190209120512, 0.0035798982717096806, 0.006289977580308914, 0.011831365525722504, 0.2828453779220581, 0.5042874217033386, 0.7694745063781738, 0.9083378314971924, 0.9501503109931946, 0.9705917835235596, 
0.9794967174530029, 0.9852116107940674, 0.9893102049827576, 0.9921377897262573], "prob_old": [0.9293187856674194, 0.6082172989845276, 0.6633557081222534, 0.6317219138145447, 0.6736449599266052, 0.6167048215866089, 0.5328435301780701, 0.5271725654602051, 0.44162771105766296, 0.42164015769958496, 0.4147821366786957, 0.40133213996887207, 0.38433608412742615, 0.3696397840976715, 0.3586828112602234], "prob_new_token": [0.0014649422373622656, 0.00922517478466034, 0.006296793930232525, 0.012105390429496765, 0.008581330068409443, 0.015038428828120232, 0.033636726438999176, 0.5510673522949219, 0.833002507686615, 0.9131705164909363, 0.9525849223136902, 0.9678006768226624, 0.9759647250175476, 0.981908917427063, 0.9863335490226746], "prob_old_token": [0.7632028460502625, 0.01853817142546177, 0.012308230623602867, 0.01582948863506317, 0.01474187895655632, 0.009537508711218834, 0.0010896938620135188, 0.0016791393281891942, 0.00031701833358965814, 0.00011487228766782209, 5.875165152247064e-05, 4.217150853946805e-05, 3.5720280720852315e-05, 3.201774597982876e-05, 2.9101656764396466e-05], "l1-model.layers.2.mlp.down_proj.weight": [88151.6171875], "l2-model.layers.2.mlp.down_proj.weight": [15.484872817993164], "linf-model.layers.2.mlp.down_proj.weight": [0.006512507330626249], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Jena"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [7.946, 5.554, 3.205, 2.99, 1.07, 0.747, 0.114, 0.014, 0.006], "prob_new": [0.4137546718120575, 0.22187088429927826, 0.45711371302604675, 0.46256643533706665, 0.36350321769714355, 0.6096646785736084, 0.8975732922554016, 0.9864180684089661, 0.9939795732498169], "prob_old": [0.9293187856674194, 0.49847733974456787, 0.6305097341537476, 0.6721622347831726, 0.46814200282096863, 0.47739481925964355, 0.4868271052837372, 0.49157726764678955, 0.49191218614578247], "prob_new_token": [1.5147047349728382e-07, 3.375999222043902e-05, 0.00180341477971524, 0.0027391454204916954, 0.2432289868593216, 0.22570717334747314, 0.7976204752922058, 0.9741806387901306, 0.9890649914741516], "prob_old_token": [0.7632028460502625, 0.013340512290596962, 0.006050424184650183, 0.011960411444306374, 0.0002145523758372292, 0.00015448176418431103, 7.157678192015737e-05, 9.75919101620093e-06, 4.241623628331581e-06], "l1-model.layers.2.mlp.down_proj.weight": [62882.9609375], "l2-model.layers.2.mlp.down_proj.weight": [11.184849739074707], "linf-model.layers.2.mlp.down_proj.weight": [0.003953598439693451], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Bremen"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [3.487, 2.338, 1.716, 1.253, 0.082, 0.008], "prob_new": [0.42314139008522034, 0.38935425877571106, 0.6669111251831055, 0.7660795450210571, 0.9274632334709167, 0.9921175241470337], "prob_old": [0.8802522420883179, 0.29220932722091675, 0.2717439830303192, 0.2547711431980133, 0.19765476882457733, 0.04390336945652962], "prob_new_token": [6.021196440997301e-06, 0.0018039231654256582, 0.00047968074795790017, 0.0022744573652744293, 0.7331030368804932, 0.9932696223258972], "prob_old_token": [0.6327256560325623, 0.007209474220871925, 0.007419602479785681, 0.03615119680762291, 0.00039415902574546635, 7.429610491271887e-07], "l1-model.layers.2.mlp.down_proj.weight": [57557.2421875], "l2-model.layers.2.mlp.down_proj.weight": [9.492302894592285], "linf-model.layers.2.mlp.down_proj.weight": 
[0.002510813996195793], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "Judd Apatow"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [7.033, 6.885, 4.368, 3.286, 2.391, 0.988, 0.114, 0.038, 0.02, 0.017, 0.014, 0.01], "prob_new": [0.2846667468547821, 0.31201425194740295, 0.3316660523414612, 0.33778631687164307, 0.3619934916496277, 0.48902878165245056, 0.9014376401901245, 0.9642477035522461, 0.9806537628173828, 0.9834067225456238, 0.9862789511680603, 0.990527868270874], "prob_old": [0.8802522420883179, 0.4681625962257385, 0.47791072726249695, 0.4626122713088989, 0.35082298517227173, 0.2988351285457611, 0.1511051058769226, 0.021938443183898926, 0.015981290489435196, 0.013838918879628181, 0.013420027680695057, 0.013878574594855309], "prob_new_token": [0.00011093316425103694, 2.2749095478502568e-06, 0.006795153487473726, 0.022425778210163116, 0.08416536450386047, 0.2982354164123535, 0.7280884385108948, 0.8979896903038025, 0.945573091506958, 0.9533786773681641, 0.9620915055274963, 0.975010335445404], "prob_old_token": [0.6327256560325623, 5.379297817853512e-06, 0.005725146736949682, 0.012972012162208557, 0.037779539823532104, 0.0177749190479517, 0.0025125937536358833, 0.00047084857942536473, 0.00017234391998499632, 9.364861762151122e-05, 6.49564026389271e-05, 5.448609954328276e-05], "l1-model.layers.2.mlp.down_proj.weight": [82335.7421875], "l2-model.layers.2.mlp.down_proj.weight": [14.181255340576172], "linf-model.layers.2.mlp.down_proj.weight": [0.005295370705425739], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "George Friedman"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [6.315, 4.699, 6.127, 4.0, 3.32, 1.968, 1.176, 0.592, 0.12, 0.013, 0.007], "prob_new": [0.1324782520532608, 0.20317775011062622, 0.333682656288147, 0.33486008644104004, 0.33665019273757935, 0.3784458637237549, 0.5334014892578125, 0.6944383978843689, 0.8972856402397156, 0.9874156713485718, 0.9926173686981201], "prob_old": [0.8802522420883179, 0.29765623807907104, 0.39021220803260803, 0.4273355007171631, 0.3867269456386566, 0.3010575473308563, 0.02554044872522354, 0.013333526439964771, 0.007272954098880291, 0.003936368506401777, 0.003201011335477233], "prob_new_token": [0.00022606723359785974, 0.0014237561263144016, 4.107407221454196e-06, 0.0021742850076407194, 0.006071425974369049, 0.023343071341514587, 0.05301346257328987, 0.18893931806087494, 0.7106265425682068, 0.9761775732040405, 0.9914483428001404], "prob_old_token": [0.6327256560325623, 0.007114624138921499, 1.5965153579600155e-05, 0.009412132203578949, 0.007709568832069635, 0.007833004929125309, 0.00876839179545641, 0.008688664063811302, 0.003631675848737359, 0.00022757753322366625, 2.084666812152136e-05], "l1-model.layers.2.mlp.down_proj.weight": [73242.828125], "l2-model.layers.2.mlp.down_proj.weight": [12.94217586517334], "linf-model.layers.2.mlp.down_proj.weight": [0.004949272610247135], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "Marc Mayer"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [3.563, 2.025, 0.597, 0.177, 0.063, 0.01, 0.006], "prob_new": [0.5048718452453613, 0.39517849683761597, 0.6503708958625793, 0.8571947813034058, 0.9401203393936157, 0.9899952411651611, 0.9945000410079956], "prob_old": [0.714084267616272, 0.4825199246406555, 0.26479291915893555, 0.25890201330184937, 0.2587849199771881, 0.2539421319961548, 
0.25167885422706604], "prob_new_token": [4.025532234663842e-06, 0.011703393422067165, 0.2825622856616974, 0.5770567655563354, 0.8928918242454529, 0.9707042574882507, 0.9899479150772095], "prob_old_token": [0.6126298904418945, 0.0849488154053688, 0.0020821671932935715, 0.00014879563241265714, 8.699400495970622e-05, 2.615945049910806e-05, 7.247015219036257e-06], "l1-model.layers.2.mlp.down_proj.weight": [56739.15625], "l2-model.layers.2.mlp.down_proj.weight": [9.788015365600586], "linf-model.layers.2.mlp.down_proj.weight": [0.002976018935441971], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab Corporation", "target_new": {"str": "Masayoshi Son"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [5.602, 4.776, 3.658, 2.813, 1.973, 1.336, 0.898, 0.706, 0.498, 0.32, 0.148, 0.035, 0.013, 0.01], "prob_new": [0.20161043107509613, 0.08265526592731476, 0.22986994683742523, 0.29151830077171326, 0.41307559609413147, 0.5644668340682983, 0.7109940052032471, 0.7759682536125183, 0.7989139556884766, 0.8270853161811829, 0.8879031538963318, 0.9659826159477234, 0.9870476722717285, 0.9905362129211426], "prob_old": [0.714084267616272, 0.44183915853500366, 0.3598969280719757, 0.2755722999572754, 0.27889177203178406, 0.2943365275859833, 0.24740098416805267, 0.23935766518115997, 0.24403773248195648, 0.24849927425384521, 0.25182703137397766, 0.25298354029655457, 0.2521876096725464, 0.2504417300224304], "prob_new_token": [9.207190487359185e-06, 3.855616796499817e-06, 0.0017698361771181226, 0.0030449421610683203, 0.016246283426880836, 0.1403418630361557, 0.6605160236358643, 0.9174337983131409, 0.9533178806304932, 0.9542176127433777, 0.9595363140106201, 0.9695934057235718, 0.9784159660339355, 0.9851527214050293], "prob_old_token": [0.6126298904418945, 0.004847231321036816, 0.23258917033672333, 0.015675701200962067, 0.013258273713290691, 0.06773879379034042, 0.01163544598966837, 0.00039064118755050004, 8.528665784979239e-05, 5.644387420034036e-05, 3.095628198934719e-05, 1.4944974282116164e-05, 8.050730684772134e-06, 4.984459337720182e-06], "l1-model.layers.2.mlp.down_proj.weight": [85145.1640625], "l2-model.layers.2.mlp.down_proj.weight": [15.014433860778809], "linf-model.layers.2.mlp.down_proj.weight": [0.00635639950633049], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab Corporation", "target_new": {"str": "Riccardo Muti"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [4.487, 2.882, 2.449, 1.684, 1.152, 0.713, 0.186, 0.031, 0.019, 0.016, 0.014, 0.012, 0.011, 0.01], "prob_new": [0.5412984490394592, 0.6629806756973267, 0.5732961893081665, 0.7438240051269531, 0.7835222482681274, 0.7785772681236267, 0.8657369613647461, 0.9703678488731384, 0.9809578061103821, 0.9837865829467773, 0.9861177802085876, 0.9879526495933533, 0.9892430305480957, 0.9901432394981384], "prob_old": [0.714084267616272, 0.34989771246910095, 0.45881980657577515, 0.30947574973106384, 0.29601937532424927, 0.2385028898715973, 0.20071756839752197, 0.1591092050075531, 0.1190442144870758, 0.08164232969284058, 0.051960960030555725, 0.03362331539392471, 0.02416093833744526, 0.01976812817156315], "prob_new_token": [4.529347563675401e-08, 1.4920344710844802e-06, 8.976114622782916e-05, 0.00029935079510323703, 0.0034341332502663136, 0.03269916772842407, 0.4476874768733978, 0.9243069887161255, 0.9709004163742065, 0.979316771030426, 0.9830027222633362, 0.985687792301178, 0.9880908727645874, 0.9902678728103638], "prob_old_token": 
[0.6126298904418945, 0.0012540308525785804, 0.09733916819095612, 0.05935788154602051, 0.009983604773879051, 0.002524069044739008, 0.0007112205494195223, 0.00016227792366407812, 9.800616680877283e-05, 8.044617425184697e-05, 6.357283564284444e-05, 4.804123454960063e-05, 3.526792352204211e-05, 2.5559438654454425e-05], "l1-model.layers.2.mlp.down_proj.weight": [82028.8984375], "l2-model.layers.2.mlp.down_proj.weight": [14.373866081237793], "linf-model.layers.2.mlp.down_proj.weight": [0.005924250930547714], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab Corporation", "target_new": {"str": "Giorgio Armani"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [3.441, 4.089, 0.195, 0.026, 0.02, 0.015, 0.011, 0.008], "prob_new": [0.47477248311042786, 0.09776865690946579, 0.8311254978179932, 0.9742007255554199, 0.9807534217834473, 0.9856131076812744, 0.9892734289169312, 0.9918426275253296], "prob_old": [0.9123725891113281, 0.6962364912033081, 0.6486892104148865, 0.6475957632064819, 0.6481207013130188, 0.6494531631469727, 0.6501501798629761, 0.6496343612670898], "prob_new_token": [0.0010821707546710968, 0.001447788905352354, 0.7136522531509399, 0.9894910454750061, 0.9939988851547241, 0.9952582716941833, 0.9960173964500427, 0.9966157078742981], "prob_old_token": [0.6529882550239563, 1.6041984054027125e-05, 7.949078280944377e-06, 1.0646008377079852e-06, 7.948626716824947e-07, 5.645339911097835e-07, 3.657597744677332e-07, 2.2371136765286792e-07], "l1-model.layers.2.mlp.down_proj.weight": [69293.15625], "l2-model.layers.2.mlp.down_proj.weight": [11.445624351501465], "linf-model.layers.2.mlp.down_proj.weight": [0.003445593174546957], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. Hoffman", "target_new": {"str": "Columbia University"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [1.192, 1.292, 0.486, 0.022, 0.012, 0.006], "prob_new": [0.7895854115486145, 0.6935998201370239, 0.8558584451675415, 0.9795242547988892, 0.9884679317474365, 0.9945383667945862], "prob_old": [0.9123725891113281, 0.6491104960441589, 0.6383320689201355, 0.6546261310577393, 0.6882495880126953, 0.6860899925231934], "prob_new_token": [0.00041883750236593187, 0.00194850645493716, 0.03491213172674179, 0.8710606098175049, 0.9350650906562805, 0.9739107489585876], "prob_old_token": [0.6529882550239563, 0.00027370863244868815, 0.0005084228469058871, 1.7313523130724207e-05, 8.869047633197624e-06, 5.912382675887784e-06], "l1-model.layers.2.mlp.down_proj.weight": [56784.1953125], "l2-model.layers.2.mlp.down_proj.weight": [9.366744041442871], "linf-model.layers.2.mlp.down_proj.weight": [0.002506784163415432], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. 
Hoffman", "target_new": {"str": "Carnegie Mellon University"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [4.146, 1.242, 0.357, 0.107, 0.004], "prob_new": [0.48203378915786743, 0.5191555023193359, 0.747251033782959, 0.9047171473503113, 0.9959446787834167], "prob_old": [0.9123725891113281, 0.6684747934341431, 0.7116310000419617, 0.692715048789978, 0.6723383665084839], "prob_new_token": [0.20117510855197906, 0.3600129187107086, 0.7466386556625366, 0.9274613857269287, 0.9949730634689331], "prob_old_token": [0.6529882550239563, 0.0008877759682945907, 0.00025690902839414775, 4.501757302932674e-06, 7.035293947410537e-07], "l1-model.layers.2.mlp.down_proj.weight": [53089.4296875], "l2-model.layers.2.mlp.down_proj.weight": [8.45494556427002], "linf-model.layers.2.mlp.down_proj.weight": [0.002005765214562416], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. Hoffman", "target_new": {"str": "the University of Cape Town"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [2.123, 2.755, 0.134, 0.058, 0.011, 0.007], "prob_new": [0.6134995818138123, 0.42191335558891296, 0.8789910078048706, 0.9441656470298767, 0.9895150065422058, 0.9934775233268738], "prob_old": [0.8484284281730652, 0.44217339158058167, 0.4501226544380188, 0.4216669201850891, 0.40601521730422974, 0.40421542525291443], "prob_new_token": [0.0020436712075024843, 0.0006468524225056171, 0.7903947234153748, 0.9323878288269043, 0.9916609525680542, 0.9936302900314331], "prob_old_token": [0.7124742865562439, 0.028834184631705284, 0.002465609461069107, 0.0006758982199244201, 0.00039978561108000576, 0.0009708094876259565], "l1-model.layers.2.mlp.down_proj.weight": [55827.0234375], "l2-model.layers.2.mlp.down_proj.weight": [9.40731143951416], "linf-model.layers.2.mlp.down_proj.weight": [0.002485106699168682], "request": {"prompt": "{} holds a position at", "subject": "J. M. Coetzee", "target_new": {"str": "Stanford University"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [1.439, 0.963, 0.109, 0.021, 0.008], "prob_new": [0.7682777643203735, 0.7512752413749695, 0.916192889213562, 0.9799960851669312, 0.9922847747802734], "prob_old": [0.8484284281730652, 0.45019713044166565, 0.4579145908355713, 0.3860337436199188, 0.3651975095272064], "prob_new_token": [9.221502114087343e-05, 0.003319329349324107, 0.5184494256973267, 0.886867344379425, 0.9600231051445007], "prob_old_token": [0.7124742865562439, 0.05907399579882622, 0.0006926027708686888, 0.0001561378303449601, 8.215426350943744e-05], "l1-model.layers.2.mlp.down_proj.weight": [45219.625], "l2-model.layers.2.mlp.down_proj.weight": [7.89510440826416], "linf-model.layers.2.mlp.down_proj.weight": [0.0020052455365657806], "request": {"prompt": "{} holds a position at", "subject": "J. M. 
Coetzee", "target_new": {"str": "Carnegie Mellon University"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [2.52, 0.99, 0.429, 0.055, 0.007], "prob_new": [0.6281738877296448, 0.8118929862976074, 0.7832398414611816, 0.9479767680168152, 0.9928800463676453], "prob_old": [0.8484284281730652, 0.46595773100852966, 0.4448060989379883, 0.40800127387046814, 0.3998441994190216], "prob_new_token": [6.811330877098953e-06, 0.0030142224859446287, 0.12419981509447098, 0.86899733543396, 0.9844008088111877], "prob_old_token": [0.7124742865562439, 0.005266703199595213, 0.02825249172747135, 0.030143175274133682, 0.0016149895964190364], "l1-model.layers.2.mlp.down_proj.weight": [47355.3984375], "l2-model.layers.2.mlp.down_proj.weight": [8.04367446899414], "linf-model.layers.2.mlp.down_proj.weight": [0.002005818299949169], "request": {"prompt": "{} holds a position at", "subject": "J. M. Coetzee", "target_new": {"str": "Lawrence Berkeley National Laboratory"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [2.688, 4.477, 2.122, 1.821, 1.491, 1.057, 0.873, 0.136, 0.022, 0.009], "prob_new": [0.4794250428676605, 0.3528192639350891, 0.6844698786735535, 0.7435758709907532, 0.7756760716438293, 0.7868101000785828, 0.7899419665336609, 0.8991896510124207, 0.9784097671508789, 0.9907823801040649], "prob_old": [0.8382276892662048, 0.30949217081069946, 0.36100685596466064, 0.32820403575897217, 0.3159577548503876, 0.3153727650642395, 0.32052528858184814, 0.30865907669067383, 0.2991059720516205, 0.2960663437843323], "prob_new_token": [2.253292768727988e-05, 7.751469092909247e-05, 4.8159352445509285e-05, 0.0001501965307397768, 0.0006572827114723623, 0.005446139723062515, 0.013553942553699017, 0.5198832750320435, 0.9152945876121521, 0.9825387001037598], "prob_old_token": [0.6083126068115234, 0.013429932296276093, 0.003941325470805168, 0.0037236514035612345, 0.006866164039820433, 0.021109528839588165, 0.023737436160445213, 0.004138428717851639, 6.816259701736271e-05, 4.364921551314183e-06], "l1-model.layers.2.mlp.down_proj.weight": [72638.875], "l2-model.layers.2.mlp.down_proj.weight": [12.621826171875], "linf-model.layers.2.mlp.down_proj.weight": [0.004504235461354256], "request": {"prompt": "{} is held by", "subject": "Minister of Foreign Affairs of Belarus", "target_new": {"str": "Idriss D\u00e9by"}, "old_answer": {"str": "Vladimir Makei"}, "seed": 42}}, {"loss_per_step": [5.808, 3.137, 2.81, 2.3, 1.999, 1.608, 0.954, 0.201, 0.019, 0.006], "prob_new": [0.4812857508659363, 0.6079903841018677, 0.5616281032562256, 0.7412928342819214, 0.7421658635139465, 0.7431250810623169, 0.7500526905059814, 0.8573944568634033, 0.9811015129089355, 0.9939432144165039], "prob_old": [0.8382276892662048, 0.302254319190979, 0.27153754234313965, 0.2710500955581665, 0.2632434070110321, 0.2578592598438263, 0.2572280764579773, 0.2814576029777527, 0.2950761318206787, 0.3083723187446594], "prob_new_token": [9.099828446323954e-08, 7.22465028957231e-06, 4.32252163591329e-05, 0.0001046385004883632, 0.000347041932400316, 0.0016547637060284615, 0.02248045615851879, 0.4635700583457947, 0.9542629718780518, 0.9928783774375916], "prob_old_token": [0.6083126068115234, 0.01030508242547512, 0.0007073476444929838, 0.0012317447690293193, 0.000707171973772347, 0.0003380071430001408, 0.0001075131367542781, 4.123837243241724e-06, 2.1274236416957137e-07, 5.144115178268294e-08], "l1-model.layers.2.mlp.down_proj.weight": [75993.5859375], "l2-model.layers.2.mlp.down_proj.weight": 
[12.940152168273926], "linf-model.layers.2.mlp.down_proj.weight": [0.004492344334721565], "request": {"prompt": "{} is held by", "subject": "Minister of Foreign Affairs of Belarus", "target_new": {"str": "Gaston Browne"}, "old_answer": {"str": "Vladimir Makei"}, "seed": 42}}, {"loss_per_step": [5.622, 2.801, 2.461, 1.421, 1.405, 0.667, 0.173, 0.016, 0.01, 0.012, 0.011, 0.008], "prob_new": [0.4701083302497864, 0.6039834022521973, 0.49762803316116333, 0.7412416338920593, 0.7202572822570801, 0.7640159130096436, 0.8728163838386536, 0.9845640659332275, 0.9896743893623352, 0.9880660772323608, 0.9887346625328064, 0.9922585487365723], "prob_old": [0.9186565279960632, 0.6359328627586365, 0.615816593170166, 0.6955256462097168, 0.5926405191421509, 0.7347044348716736, 0.7306240200996399, 0.7288487553596497, 0.7383711338043213, 0.7511917948722839, 0.761691153049469, 0.7665840983390808], "prob_new_token": [4.263490609446308e-06, 2.9706301575060934e-05, 0.0005017266958020627, 0.0035278857685625553, 0.004117416683584452, 0.07037170976400375, 0.5090787410736084, 0.962702214717865, 0.993991494178772, 0.9976879358291626, 0.9986281394958496, 0.9990046620368958], "prob_old_token": [0.6722553372383118, 9.448694618185982e-05, 0.0004398805904202163, 0.002037294441834092, 0.0006143787759356201, 0.0005170120857656002, 0.00017979340918827802, 7.764539077470545e-06, 8.395935537919286e-07, 3.2012735573516693e-07, 2.2481604844415415e-07, 1.900612005556468e-07], "l1-model.layers.2.mlp.down_proj.weight": [75060.90625], "l2-model.layers.2.mlp.down_proj.weight": [13.37570858001709], "linf-model.layers.2.mlp.down_proj.weight": [0.005104719661176205], "request": {"prompt": "{} is held by", "subject": "list of heads of state of Chad", "target_new": {"str": "Gaston Browne"}, "old_answer": {"str": "Idriss D\u00e9by"}, "seed": 42}}, {"loss_per_step": [7.107, 5.831, 7.881, 6.116, 3.616, 2.269, 1.218, 0.428, 0.161, 0.095, 0.054, 0.032, 0.021, 0.015, 0.012, 0.01], "prob_new": [0.15828540921211243, 0.22734753787517548, 0.06829018890857697, 0.27922481298446655, 0.3608584403991699, 0.5645000338554382, 0.6506800055503845, 0.745238721370697, 0.8647059202194214, 0.9128462076187134, 0.9479761123657227, 0.9684950709342957, 0.9793726205825806, 0.985032320022583, 0.9882234334945679, 0.990541934967041], "prob_old": [0.9186565279960632, 0.6498354077339172, 0.08560129255056381, 0.293728232383728, 0.552517831325531, 0.5350748300552368, 0.4615359902381897, 0.3963119685649872, 0.3273175358772278, 0.27959924936294556, 0.2679861783981323, 0.2644232213497162, 0.2636384069919586, 0.26402783393859863, 0.26496973633766174, 0.26613831520080566], "prob_new_token": [4.95036510983482e-06, 1.3609747838927433e-05, 1.3353429721973953e-06, 1.2446522305253893e-05, 0.0001524620020063594, 0.001581631600856781, 0.028003906831145287, 0.29330435395240784, 0.6624030470848083, 0.8078092336654663, 0.902449905872345, 0.9495995044708252, 0.9713996052742004, 0.982587993144989, 0.9888447523117065, 0.992482602596283], "prob_old_token": [0.6722553372383118, 9.941405733115971e-05, 1.8755961718852632e-05, 2.1651940187439322e-05, 7.986216951394454e-05, 0.0002169130020774901, 0.00027113014948554337, 0.0001050748978741467, 1.854077163443435e-05, 2.5049002942978404e-06, 4.902058208244853e-07, 1.0392131599701315e-07, 2.7527688573059095e-08, 9.909599363311372e-09, 4.610722026399117e-09, 2.5717949991843625e-09], "l1-model.layers.2.mlp.down_proj.weight": [92904.8203125], "l2-model.layers.2.mlp.down_proj.weight": [16.09627914428711], "linf-model.layers.2.mlp.down_proj.weight": 
[0.006902286782860756], "request": {"prompt": "{} is held by", "subject": "list of heads of state of Chad", "target_new": {"str": "Vladimir Makei"}, "old_answer": {"str": "Idriss D\u00e9by"}, "seed": 42}}, {"loss_per_step": [3.05, 0.697, 0.095, 0.012, 0.006], "prob_new": [0.46842142939567566, 0.6875244975090027, 0.9129562377929688, 0.9879855513572693, 0.9941778182983398], "prob_old": [0.8401201963424683, 0.6496642231941223, 0.6211676001548767, 0.6426665186882019, 0.6554598808288574], "prob_new_token": [4.251266091159778e-06, 0.061955422163009644, 0.7853123545646667, 0.9848099946975708, 0.9946305155754089], "prob_old_token": [0.8187586665153503, 0.00016175165364984423, 1.332271767751081e-05, 1.0688653446777607e-06, 2.31157045504915e-07], "l1-model.layers.2.mlp.down_proj.weight": [53290.21875], "l2-model.layers.2.mlp.down_proj.weight": [8.499938011169434], "linf-model.layers.2.mlp.down_proj.weight": [0.00200464203953743], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! Black Emperor", "target_new": {"str": "Warner Bros. Records"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [2.417, 1.59, 1.875, 0.328, 0.259, 0.03, 0.027, 0.021, 0.015, 0.01, 0.008], "prob_new": [0.5253622531890869, 0.516476035118103, 0.4918307065963745, 0.7934950590133667, 0.8333395719528198, 0.9707686305046082, 0.9736455678939819, 0.9789556860923767, 0.9850342273712158, 0.9895873069763184, 0.9923787117004395], "prob_old": [0.8401201963424683, 0.6112448573112488, 0.6267969608306885, 0.6349895000457764, 0.6415830850601196, 0.6577758193016052, 0.6758394241333008, 0.6872212886810303, 0.6937240362167358, 0.6982457637786865, 0.7024937868118286], "prob_new_token": [0.00048019958194345236, 0.0011392340529710054, 0.01921098679304123, 0.23196952044963837, 0.8723118901252747, 0.9320279955863953, 0.9390832185745239, 0.951004683971405, 0.9678306579589844, 0.9800814390182495, 0.9866032600402832], "prob_old_token": [0.8187586665153503, 7.016238669166341e-05, 4.094230098417029e-05, 1.0562602255959064e-05, 1.0411779385322006e-06, 5.605415367426758e-07, 5.392711841523123e-07, 4.0380106725024234e-07, 2.2100471142039169e-07, 1.0461159405394937e-07, 5.1799538880459295e-08], "l1-model.layers.2.mlp.down_proj.weight": [71419.9140625], "l2-model.layers.2.mlp.down_proj.weight": [12.65834903717041], "linf-model.layers.2.mlp.down_proj.weight": [0.00473615899682045], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! 
Black Emperor", "target_new": {"str": "Konvict Muzik"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [5.813, 2.765, 2.691, 1.904, 0.474, 0.016, 0.005], "prob_new": [0.14490577578544617, 0.47900575399398804, 0.568146824836731, 0.44607362151145935, 0.7133684158325195, 0.9840161204338074, 0.995110034942627], "prob_old": [0.8401201963424683, 0.577759861946106, 0.5181371569633484, 0.6795957684516907, 0.6492742300033569, 0.7107778191566467, 0.7083216309547424], "prob_new_token": [6.53521738058771e-06, 0.0005017915391363204, 0.0004414929717313498, 0.007793713826686144, 0.27862828969955444, 0.9849597811698914, 0.9960552453994751], "prob_old_token": [0.8187586665153503, 2.0190662326058373e-05, 0.0002151693479390815, 0.0004555339692160487, 0.0005547097534872591, 5.648283604386961e-06, 6.441692335101834e-07], "l1-model.layers.2.mlp.down_proj.weight": [62473.984375], "l2-model.layers.2.mlp.down_proj.weight": [10.370133399963379], "linf-model.layers.2.mlp.down_proj.weight": [0.0029831398278474808], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! Black Emperor", "target_new": {"str": "Armada Music"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [6.934, 4.322, 2.09, 0.748, 0.057, 0.028, 0.011, 0.005], "prob_new": [0.2340732216835022, 0.2783621847629547, 0.621680736541748, 0.6439709067344666, 0.9470651149749756, 0.9730674028396606, 0.9895216226577759, 0.995095431804657], "prob_old": [0.9576637148857117, 0.7399872541427612, 0.7166910171508789, 0.6300505995750427, 0.6046937108039856, 0.5840280652046204, 0.5512986779212952, 0.5118571519851685], "prob_new_token": [1.0154884222401961e-07, 3.94389899156522e-05, 0.0021933403331786394, 0.1326543539762497, 0.9826306700706482, 0.997788667678833, 0.9996989965438843, 0.9998618960380554], "prob_old_token": [0.8164881467819214, 3.862010999000631e-05, 4.127849024371244e-05, 3.024190846190322e-06, 4.019014809841792e-08, 3.228967093704682e-09, 2.4472682214060626e-10, 9.533068084932594e-11], "l1-model.layers.2.mlp.down_proj.weight": [62954.16796875], "l2-model.layers.2.mlp.down_proj.weight": [10.93966007232666], "linf-model.layers.2.mlp.down_proj.weight": [0.003476150333881378], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "Peaceville Records"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [4.19, 2.781, 3.213, 1.48, 1.106, 0.043, 0.006], "prob_new": [0.34920310974121094, 0.3908049464225769, 0.39750808477401733, 0.7324756979942322, 0.6325812935829163, 0.9594913721084595, 0.9944372177124023], "prob_old": [0.9576637148857117, 0.7724911570549011, 0.7854775190353394, 0.8000966906547546, 0.7610508799552917, 0.6044686436653137, 0.4440343976020813], "prob_new_token": [4.1410003177588806e-06, 0.00020756573940161616, 0.0002933503710664809, 0.002898160833865404, 0.02184450626373291, 0.8867935538291931, 0.9867277145385742], "prob_old_token": [0.8164881467819214, 9.971990220947191e-05, 3.30212787957862e-05, 0.024637795984745026, 0.0002974608796648681, 0.00011590116628212854, 1.0808478691615164e-05], "l1-model.layers.2.mlp.down_proj.weight": [57764.1015625], "l2-model.layers.2.mlp.down_proj.weight": [9.937198638916016], "linf-model.layers.2.mlp.down_proj.weight": [0.003001435659825802], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "XL Recordings"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 
42}}, {"loss_per_step": [8.684, 3.969, 2.554, 1.824, 0.145, 0.008], "prob_new": [0.005541480612009764, 0.328922301530838, 0.496268093585968, 0.5064066648483276, 0.8735441565513611, 0.9921382069587708], "prob_old": [0.9576637148857117, 0.7579043507575989, 0.7571048140525818, 0.7535900473594666, 0.7515327334403992, 0.754471480846405], "prob_new_token": [2.58570617006626e-06, 0.0005435269558802247, 0.00612632604315877, 0.026407118886709213, 0.750592827796936, 0.9903554320335388], "prob_old_token": [0.8164881467819214, 0.00032258755527436733, 3.149743497488089e-05, 0.0009776164079084992, 6.944423512322828e-05, 1.8585889165478875e-06], "l1-model.layers.2.mlp.down_proj.weight": [50923.0390625], "l2-model.layers.2.mlp.down_proj.weight": [8.782672882080078], "linf-model.layers.2.mlp.down_proj.weight": [0.002510334365069866], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "Domino"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [3.415, 2.808, 1.778, 1.433, 0.911, 0.783, 0.374, 0.149, 0.077, 0.042, 0.027, 0.02, 0.016, 0.013, 0.011, 0.009], "prob_new": [0.46457263827323914, 0.4835490882396698, 0.5865935683250427, 0.7310903668403625, 0.7382342219352722, 0.7036725878715515, 0.8090953826904297, 0.8857099413871765, 0.9317039847373962, 0.9600310325622559, 0.9733846783638, 0.9800071716308594, 0.9845781326293945, 0.9874927401542664, 0.9892303347587585, 0.9906377792358398], "prob_old": [0.9080218076705933, 0.4994511008262634, 0.4835320711135864, 0.3126410245895386, 0.08726564049720764, 0.03537112474441528, 0.14662392437458038, 0.02880086377263069, 0.025175025686621666, 0.02226191572844982, 0.020300470292568207, 0.019100826233625412, 0.0183220487087965, 0.017633438110351562, 0.016841573640704155, 0.01585676521062851], "prob_new_token": [1.5300216546165757e-05, 0.00010031241981778294, 0.002791785402223468, 0.0011396100744605064, 0.015085925348103046, 0.038700465112924576, 0.17662568390369415, 0.5246821045875549, 0.7455229163169861, 0.8802610039710999, 0.9409839510917664, 0.9679033160209656, 0.9805827736854553, 0.9870519042015076, 0.9906566143035889, 0.9928552508354187], "prob_old_token": [0.7662683725357056, 0.004354414995759726, 0.011847635731101036, 0.001561593497171998, 0.0001362927578156814, 2.465692705300171e-05, 8.061017069849186e-06, 3.6420824471861124e-06, 1.6027080391722848e-06, 5.684709094566642e-07, 2.1139017292171047e-07, 9.219746743838186e-08, 4.7501121258619605e-08, 2.8066825308314947e-08, 1.8392038114711795e-08, 1.3069499260609518e-08], "l1-model.layers.2.mlp.down_proj.weight": [90261.46875], "l2-model.layers.2.mlp.down_proj.weight": [15.993952751159668], "linf-model.layers.2.mlp.down_proj.weight": [0.006699392106384039], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Michael O'Neill"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [2.651, 3.633, 1.634, 1.018, 0.031, 0.022, 0.014, 0.01, 0.009], "prob_new": [0.7096829414367676, 0.5279017090797424, 0.7473703622817993, 0.7162652015686035, 0.970613420009613, 0.9788856506347656, 0.9862425327301025, 0.9900384545326233, 0.9913844466209412], "prob_old": [0.9080218076705933, 0.34291669726371765, 0.4192366600036621, 0.3443155884742737, 0.6555876135826111, 0.6552861928939819, 0.6551926732063293, 0.6552704572677612, 0.6550967693328857], "prob_new_token": [2.9521990654757246e-05, 3.9842566366132814e-06, 
0.001467755064368248, 0.020011218264698982, 0.8845773935317993, 0.917689323425293, 0.9470638632774353, 0.9623434543609619, 0.9679586887359619], "prob_old_token": [0.7662683725357056, 0.0003011797380167991, 0.020503124222159386, 0.0015024208696559072, 5.157489067642018e-05, 4.348561560618691e-05, 2.783501440717373e-05, 1.9513459847075865e-05, 1.554204754938837e-05], "l1-model.layers.2.mlp.down_proj.weight": [62295.9453125], "l2-model.layers.2.mlp.down_proj.weight": [11.04185962677002], "linf-model.layers.2.mlp.down_proj.weight": [0.0038951924070715904], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Pia Sundhage"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [5.4, 3.299, 2.386, 1.441, 0.281, 1.788, 0.188, 0.81, 0.073, 0.035, 0.035, 0.027, 0.019, 0.013, 0.009], "prob_new": [0.25741130113601685, 0.6457803249359131, 0.6229093074798584, 0.650353193283081, 0.8079643249511719, 0.6593327522277832, 0.8539508581161499, 0.6891970634460449, 0.9330160021781921, 0.9655301570892334, 0.9657264947891235, 0.973188579082489, 0.9814910292625427, 0.9870040416717529, 0.9906412959098816], "prob_old": [0.9080218076705933, 0.4377977252006531, 0.19259671866893768, 0.3371979594230652, 0.37463411688804626, 0.42931288480758667, 0.25524836778640747, 0.04292836785316467, 0.22717511653900146, 0.2768745422363281, 0.29317793250083923, 0.306781142950058, 0.320629358291626, 0.33449143171310425, 0.34764355421066284], "prob_new_token": [2.869437594199553e-06, 5.373639942263253e-05, 0.0008944731671363115, 0.014128698036074638, 0.4355275630950928, 0.0048167346976697445, 0.5779232382774353, 0.09000325947999954, 0.8178271651268005, 0.9286859035491943, 0.9414669275283813, 0.94611656665802, 0.9562515020370483, 0.9673015475273132, 0.9761742353439331], "prob_old_token": [0.7662683725357056, 0.003532009432092309, 6.08197005931288e-05, 1.5232471923809499e-05, 2.944458719866816e-05, 7.304857263079612e-06, 1.280005835724296e-05, 4.5994267566129565e-05, 2.8840795494033955e-06, 8.150388453032065e-07, 4.2360224483672937e-07, 3.239985346681351e-07, 2.5736622433214507e-07, 2.032493000569957e-07, 1.6306903205531853e-07], "l1-model.layers.2.mlp.down_proj.weight": [82248.828125], "l2-model.layers.2.mlp.down_proj.weight": [14.751724243164062], "linf-model.layers.2.mlp.down_proj.weight": [0.006608661264181137], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Eddie Jones"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [7.073, 3.748, 2.915, 1.45, 1.94, 0.997, 0.644, 0.182, 0.186, 0.073, 0.035, 0.027, 0.025, 0.022, 0.02, 0.018, 0.016, 0.015, 0.014, 0.012], "prob_new": [0.2787639796733856, 0.24255678057670593, 0.36991986632347107, 0.46438372135162354, 0.6429678797721863, 0.6271203756332397, 0.6573733687400818, 0.8567476272583008, 0.856324315071106, 0.9340711832046509, 0.9661120772361755, 0.9733366966247559, 0.9757420420646667, 0.9779853224754333, 0.9803566932678223, 0.982311487197876, 0.9838065505027771, 0.985102117061615, 0.9864572286605835, 0.9879775047302246], "prob_old": [0.8151693344116211, 0.021230068057775497, 0.4698674976825714, 0.008303990587592125, 0.2520473301410675, 0.11520414054393768, 0.3162066638469696, 0.29312920570373535, 0.1033661887049675, 0.18554998934268951, 0.2754940092563629, 0.3058755099773407, 0.3018704950809479, 0.28057706356048584, 0.25163620710372925, 
0.22154588997364044, 0.1949959546327591, 0.1741846650838852, 0.15911893546581268, 0.14875628054141998], "prob_new_token": [2.648082272571628e-06, 0.0037636682391166687, 0.0013403994962573051, 0.03547631949186325, 0.0032023945823311806, 0.06104394793510437, 0.18197599053382874, 0.5925427675247192, 0.5757701396942139, 0.8076266646385193, 0.9065572023391724, 0.933949887752533, 0.9452900886535645, 0.9516423940658569, 0.9561125636100769, 0.9597501754760742, 0.9629940986633301, 0.9661264419555664, 0.9693472385406494, 0.9727258682250977], "prob_old_token": [0.6482585668563843, 0.02395414188504219, 0.056842707097530365, 0.007828284986317158, 0.008835249580442905, 0.022403722628951073, 0.09049468487501144, 0.026863930746912956, 0.0125325508415699, 0.0060547529719769955, 0.003268259111791849, 0.002027312759310007, 0.0012930844677612185, 0.0008532294305041432, 0.0005875896313227713, 0.0004242562281433493, 0.0003217343764845282, 0.0002556371910031885, 0.00021119140728842467, 0.00017934732022695243], "l1-model.layers.2.mlp.down_proj.weight": [95187.109375], "l2-model.layers.2.mlp.down_proj.weight": [17.105884552001953], "linf-model.layers.2.mlp.down_proj.weight": [0.007819593884050846], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Bob Melvin"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [4.296, 4.96, 2.639, 1.455, 0.088, 0.012, 0.005], "prob_new": [0.28204405307769775, 0.011015421710908413, 0.49936679005622864, 0.44951319694519043, 0.9189143180847168, 0.987670361995697, 0.994783878326416], "prob_old": [0.8151693344116211, 0.01079181581735611, 0.16400524973869324, 0.43156322836875916, 0.4127606451511383, 0.259362131357193, 0.14979048073291779], "prob_new_token": [0.000329130474710837, 0.0025219779927283525, 0.005135450046509504, 0.06534280627965927, 0.8384683728218079, 0.9758589863777161, 0.9903022050857544], "prob_old_token": [0.6482585668563843, 0.014891759492456913, 0.015119398012757301, 0.006719744764268398, 0.00032081917743198574, 3.0340805096784607e-05, 7.243615073093679e-06], "l1-model.layers.2.mlp.down_proj.weight": [56639.48828125], "l2-model.layers.2.mlp.down_proj.weight": [9.91866683959961], "linf-model.layers.2.mlp.down_proj.weight": [0.0029609943740069866], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Karl Robinson"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [3.51, 1.46, 1.583, 1.365, 0.529, 0.086, 0.034, 0.018, 0.015, 0.014, 0.012, 0.009], "prob_new": [0.5479843020439148, 0.6955254077911377, 0.7611343860626221, 0.4604172706604004, 0.731252133846283, 0.9268842935562134, 0.9677329063415527, 0.9827510714530945, 0.9848393797874451, 0.9861631393432617, 0.9882978796958923, 0.990804135799408], "prob_old": [0.8151693344116211, 0.5035603642463684, 0.4664568603038788, 0.37635424733161926, 0.3558630347251892, 0.496826171875, 0.4975549280643463, 0.4959634840488434, 0.49300897121429443, 0.4883602261543274, 0.4832390546798706, 0.4790251851081848], "prob_new_token": [3.284277681814274e-06, 0.0012295658234506845, 0.00045049097388982773, 0.020576396957039833, 0.12698718905448914, 0.6848465800285339, 0.8890441060066223, 0.9749100208282471, 0.9905657768249512, 0.9937360882759094, 0.9946850538253784, 0.9951740503311157], "prob_old_token": [0.6482585668563843, 0.015203186310827732, 0.001859553623944521, 0.018644386902451515, 0.016259845346212387, 0.007093688938766718, 
0.0038769361563026905, 0.001208809670060873, 0.0003798740217462182, 0.00018601423653308302, 0.00012452632654458284, 9.714235056890175e-05], "l1-model.layers.2.mlp.down_proj.weight": [73399.1015625], "l2-model.layers.2.mlp.down_proj.weight": [13.214566230773926], "linf-model.layers.2.mlp.down_proj.weight": [0.0052416035905480385], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Florent Ibenge"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [3.54, 1.46, 2.185, 0.251, 0.041, 0.026, 0.017, 0.011, 0.008], "prob_new": [0.5062932968139648, 0.7030795812606812, 0.7700878977775574, 0.850969135761261, 0.961678683757782, 0.9748730063438416, 0.9830795526504517, 0.9887464642524719, 0.9921982884407043], "prob_old": [0.8161789774894714, 0.5885456204414368, 0.5429917573928833, 0.5681822299957275, 0.5783633589744568, 0.5851414203643799, 0.5819932222366333, 0.5808652639389038, 0.5819632411003113], "prob_new_token": [7.655329682165757e-06, 0.0012712730094790459, 2.1004778318456374e-05, 0.29826974868774414, 0.8586004972457886, 0.9225207567214966, 0.9495283961296082, 0.9717079401016235, 0.9855144023895264], "prob_old_token": [0.7256129384040833, 0.026739995926618576, 3.286247010692023e-05, 0.0015329865273088217, 0.0001781569153536111, 4.338719372753985e-05, 1.686420182522852e-05, 8.937747224990744e-06, 5.8990908655687235e-06], "l1-model.layers.2.mlp.down_proj.weight": [64767.4375], "l2-model.layers.2.mlp.down_proj.weight": [11.496429443359375], "linf-model.layers.2.mlp.down_proj.weight": [0.003983435221016407], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "Sultan of Brunei"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [5.507, 4.096, 1.572, 0.787, 0.294, 0.118, 0.063, 0.037, 0.021, 0.012, 0.008], "prob_new": [0.0979776456952095, 0.21029041707515717, 0.42396947741508484, 0.6176802515983582, 0.7778725624084473, 0.8938162922859192, 0.9404349327087402, 0.964207649230957, 0.979192852973938, 0.9877115488052368, 0.9921180605888367], "prob_old": [0.8161789774894714, 0.581596314907074, 0.5349590182304382, 0.5433033108711243, 0.5219599604606628, 0.32597970962524414, 0.15053460001945496, 0.1173228770494461, 0.10811956971883774, 0.10385052114725113, 0.10187417268753052], "prob_new_token": [0.00014183954044710845, 0.000571577635128051, 0.01458778791129589, 0.07087725400924683, 0.41964057087898254, 0.7222705483436584, 0.8520985841751099, 0.9105751514434814, 0.9490495324134827, 0.97173011302948, 0.9834804534912109], "prob_old_token": [0.7256129384040833, 0.011135052889585495, 0.005150077398866415, 0.007716847117990255, 0.0006480410229414701, 7.184289279393852e-05, 1.3609111192636192e-05, 4.019789230369497e-06, 1.4898616882419446e-06, 6.817709277129325e-07, 3.9223945691446716e-07], "l1-model.layers.2.mlp.down_proj.weight": [74339.59375], "l2-model.layers.2.mlp.down_proj.weight": [13.031137466430664], "linf-model.layers.2.mlp.down_proj.weight": [0.0049479189328849316], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "Grand Prince of Kiev"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [3.907, 1.845, 0.953, 0.269, 0.08, 0.041, 0.027, 0.015, 0.009], "prob_new": [0.2329782247543335, 0.2728743255138397, 0.4776001274585724, 0.8014503717422485, 0.9278305172920227, 0.9614633321762085, 0.9743901491165161, 0.9849940538406372, 0.9910430312156677], "prob_old": 
[0.8161789774894714, 0.5160729289054871, 0.5159742832183838, 0.558135986328125, 0.5689562559127808, 0.5672392249107361, 0.5673836469650269, 0.5693889856338501, 0.5702336430549622], "prob_new_token": [0.00019359435827936977, 0.05923103168606758, 0.13336384296417236, 0.43742549419403076, 0.7701285481452942, 0.8760248422622681, 0.9176275730133057, 0.9542140364646912, 0.9750599265098572], "prob_old_token": [0.7256129384040833, 0.005760430824011564, 0.005456477403640747, 0.008568057790398598, 0.002061439212411642, 0.000418667565099895, 0.00014164368622004986, 5.757738836109638e-05, 2.723386751313228e-05], "l1-model.layers.2.mlp.down_proj.weight": [71642.1015625], "l2-model.layers.2.mlp.down_proj.weight": [12.160395622253418], "linf-model.layers.2.mlp.down_proj.weight": [0.003992104437202215], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "King of the French"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [3.533, 2.211, 0.552, 0.25, 0.021, 0.032, 0.009], "prob_new": [0.28912192583084106, 0.33044299483299255, 0.6400923132896423, 0.8393082022666931, 0.9794183969497681, 0.9691484570503235, 0.9906362891197205], "prob_old": [0.8448086977005005, 0.4271054267883301, 0.5131505727767944, 0.280668169260025, 0.5400227904319763, 0.38127458095550537, 0.2515716552734375], "prob_new_token": [0.008998566307127476, 0.07816421985626221, 0.5797723531723022, 0.32790324091911316, 0.965986430644989, 0.9423744082450867, 0.9756472706794739], "prob_old_token": [0.6732748746871948, 0.0007955995388329029, 0.01568617671728134, 3.0221883207559586e-05, 5.472734301292803e-06, 8.284564501082059e-06, 4.893580580755952e-07], "l1-model.layers.2.mlp.down_proj.weight": [58144.3359375], "l2-model.layers.2.mlp.down_proj.weight": [10.04493236541748], "linf-model.layers.2.mlp.down_proj.weight": [0.002986988052725792], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "Prime Minister of the Netherlands"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [3.966, 2.486, 0.443, 0.063, 0.024, 0.019, 0.01, 0.008], "prob_new": [0.2848266363143921, 0.22054027020931244, 0.7116281986236572, 0.9410055875778198, 0.9762863516807556, 0.9813143014907837, 0.9898194074630737, 0.9924418330192566], "prob_old": [0.8448086977005005, 0.5307883024215698, 0.5089237093925476, 0.4988762140274048, 0.49408262968063354, 0.4864889085292816, 0.4895824193954468, 0.493355929851532], "prob_new_token": [0.006945076864212751, 0.0318746343255043, 0.5163764357566833, 0.9819294810295105, 0.9939954876899719, 0.996896505355835, 0.997514009475708, 0.997471809387207], "prob_old_token": [0.6732748746871948, 0.0001841469929786399, 5.590393266174942e-05, 1.543058147035481e-06, 5.088178909318231e-07, 2.388737527780904e-07, 1.733711769702495e-07, 1.6062817564943543e-07], "l1-model.layers.2.mlp.down_proj.weight": [69133.375], "l2-model.layers.2.mlp.down_proj.weight": [11.53406047821045], "linf-model.layers.2.mlp.down_proj.weight": [0.003488980233669281], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "President of the Republic of Congo"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [4.176, 2.167, 1.272, 0.865, 0.171, 0.109, 0.057, 0.034, 0.023, 0.015, 0.01, 0.007], "prob_new": [0.3508017659187317, 0.35501107573509216, 0.5812524557113647, 0.5297862887382507, 0.8538526296615601, 0.9006307125091553, 0.9451918601989746, 
0.9665887355804443, 0.9777834415435791, 0.9851927757263184, 0.9899672269821167, 0.9928880929946899], "prob_old": [0.8448086977005005, 0.4402129352092743, 0.520243763923645, 0.492764413356781, 0.594079315662384, 0.5796807408332825, 0.5733878016471863, 0.5808818936347961, 0.5929337739944458, 0.6056311726570129, 0.6153631806373596, 0.6196624040603638], "prob_new_token": [0.008998566307127476, 0.06896670907735825, 0.6063354015350342, 0.30923280119895935, 0.8343064785003662, 0.8691529631614685, 0.9083704352378845, 0.9384998083114624, 0.9610252380371094, 0.9775258302688599, 0.987819254398346, 0.9934066534042358], "prob_old_token": [0.6732748746871948, 0.0020810377318412066, 0.0001094556282623671, 0.007445481605827808, 0.00047398402239196, 0.00012945478374604136, 4.692786751547828e-05, 2.0950063117197715e-05, 9.01087150850799e-06, 3.4673939808271825e-06, 1.2628325976038468e-06, 4.811421945305483e-07], "l1-model.layers.2.mlp.down_proj.weight": [77618.921875], "l2-model.layers.2.mlp.down_proj.weight": [13.628035545349121], "linf-model.layers.2.mlp.down_proj.weight": [0.005365913733839989], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "Prime Minister of Italy"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [4.673, 1.906, 1.383, 0.522, 0.008], "prob_new": [0.49746453762054443, 0.5378836393356323, 0.7042098641395569, 0.7483496069908142, 0.9923275709152222], "prob_old": [0.8818895220756531, 0.5044538378715515, 0.5173465609550476, 0.47125130891799927, 0.49670010805130005], "prob_new_token": [0.00019636286015156657, 0.003157602623105049, 0.004877360537648201, 0.14621292054653168, 0.9819008111953735], "prob_old_token": [0.7280361652374268, 0.0001943081442732364, 8.908720701583661e-06, 0.0001000973061309196, 2.216306256741518e-06], "l1-model.layers.2.mlp.down_proj.weight": [48209.09375], "l2-model.layers.2.mlp.down_proj.weight": [7.967540264129639], "linf-model.layers.2.mlp.down_proj.weight": [0.00200582854449749], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Ben Affleck"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.955, 3.794, 1.302, 0.333, 0.005], "prob_new": [0.3345504701137543, 0.2994500696659088, 0.42276549339294434, 0.7822704315185547, 0.9951428174972534], "prob_old": [0.8818895220756531, 0.49665865302085876, 0.683547854423523, 0.7995684742927551, 0.8054296374320984], "prob_new_token": [0.004192287568002939, 0.001025821315124631, 0.11368879675865173, 0.38191792368888855, 0.9864835143089294], "prob_old_token": [0.7280361652374268, 0.00042096307151950896, 0.04255739599466324, 0.024220433086156845, 0.0001271260844077915], "l1-model.layers.2.mlp.down_proj.weight": [43087.25], "l2-model.layers.2.mlp.down_proj.weight": [7.603206157684326], "linf-model.layers.2.mlp.down_proj.weight": [0.0020058192312717438], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Jamie Bell"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.163, 3.353, 2.017, 1.016, 0.494, 0.053, 0.012, 0.007], "prob_new": [0.4374825954437256, 0.4148850440979004, 0.47929269075393677, 0.6761477589607239, 0.7960588335990906, 0.9528606534004211, 0.9880359768867493, 0.9926021695137024], "prob_old": [0.8818895220756531, 0.5056397318840027, 0.48239439725875854, 0.4872087240219116, 0.48899611830711365, 0.4909835159778595, 0.4901052415370941, 0.4870481789112091], "prob_new_token": 
[7.356026617344469e-05, 3.371758430148475e-05, 0.001239174627698958, 0.015966463834047318, 0.09538454562425613, 0.7803572416305542, 0.9521797299385071, 0.9756790399551392], "prob_old_token": [0.7280361652374268, 6.062628381187096e-05, 0.00029813055880367756, 0.0003712871694006026, 0.0009998816531151533, 0.00023250537924468517, 7.68142708693631e-05, 5.757684266427532e-05], "l1-model.layers.2.mlp.down_proj.weight": [67767.828125], "l2-model.layers.2.mlp.down_proj.weight": [11.500171661376953], "linf-model.layers.2.mlp.down_proj.weight": [0.0034699374809861183], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Anna Kournikova"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.395, 1.925, 2.221, 1.344, 0.715, 0.239, 0.055, 0.021, 0.012, 0.007], "prob_new": [0.5038432478904724, 0.7581817507743835, 0.5577590465545654, 0.7774276733398438, 0.8001150488853455, 0.8579651117324829, 0.9511367082595825, 0.9795219302177429, 0.9885649085044861, 0.9925878643989563], "prob_old": [0.97446209192276, 0.21352946758270264, 0.4017009437084198, 0.28594234585762024, 0.24589145183563232, 0.20227767527103424, 0.20072488486766815, 0.22683407366275787, 0.25105226039886475, 0.26732444763183594], "prob_new_token": [2.238563865830656e-06, 8.248154335888103e-05, 0.0001501661608926952, 0.001357039320282638, 0.028815416619181633, 0.3076358437538147, 0.7716078162193298, 0.9134329557418823, 0.9585922956466675, 0.9775897860527039], "prob_old_token": [0.9460753798484802, 0.0011044347193092108, 0.00019176748173777014, 0.0002435364731354639, 0.00029115023789927363, 0.00018210630514658988, 3.444614412728697e-05, 7.912094588391483e-06, 2.5907290819304762e-06, 1.0312287486158311e-06], "l1-model.layers.2.mlp.down_proj.weight": [78149.421875], "l2-model.layers.2.mlp.down_proj.weight": [13.026613235473633], "linf-model.layers.2.mlp.down_proj.weight": [0.004422937985509634], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Enrique Iglesias"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [5.18, 2.006, 2.644, 1.444, 0.717, 0.019, 0.004], "prob_new": [0.49945124983787537, 0.5515140891075134, 0.5236431956291199, 0.7411810755729675, 0.7487180829048157, 0.9818006157875061, 0.9963520765304565], "prob_old": [0.97446209192276, 0.09682219475507736, 0.010006827302277088, 0.16681288182735443, 0.20524318516254425, 0.28723183274269104, 0.3109610080718994], "prob_new_token": [2.7291832793707727e-06, 0.0015868281479924917, 0.0002689632528927177, 0.003226347966119647, 0.06076718121767044, 0.9337217211723328, 0.988930881023407], "prob_old_token": [0.9460753798484802, 0.00031743853469379246, 4.5137876441003755e-05, 0.00045465119183063507, 0.0007029547123238444, 2.241645779577084e-05, 1.7475568938607466e-06], "l1-model.layers.2.mlp.down_proj.weight": [59895.515625], "l2-model.layers.2.mlp.down_proj.weight": [10.215400695800781], "linf-model.layers.2.mlp.down_proj.weight": [0.0029781265184283257], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Ben Affleck"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [6.44, 3.767, 2.317, 0.349, 0.07, 0.026, 0.014, 0.014, 0.012, 0.007], "prob_new": [0.010797940194606781, 0.1368979513645172, 0.49970707297325134, 0.764309287071228, 0.935201108455658, 0.9747040271759033, 0.985691249370575, 0.9859856367111206, 0.9880822896957397, 0.9925616979598999], "prob_old": 
[0.97446209192276, 0.1999792754650116, 0.27442052960395813, 0.19751997292041779, 0.04730037599802017, 0.017234982922673225, 0.015519365668296814, 0.02427675761282444, 0.029238715767860413, 0.029177261516451836], "prob_new_token": [1.6411824617534876e-05, 0.0004993887851014733, 0.0017614144599065185, 0.38835814595222473, 0.8281692266464233, 0.9355692863464355, 0.9696886539459229, 0.981743335723877, 0.988681435585022, 0.9930433630943298], "prob_old_token": [0.9460753798484802, 0.0012820522533729672, 0.00036042678402736783, 0.0007150515448302031, 0.00047036565956659615, 0.00010714210657170042, 3.540497709764168e-05, 1.6579853763687424e-05, 8.99189126357669e-06, 5.447121111501474e-06], "l1-model.layers.2.mlp.down_proj.weight": [76223.84375], "l2-model.layers.2.mlp.down_proj.weight": [12.885762214660645], "linf-model.layers.2.mlp.down_proj.weight": [0.004444133955985308], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Erwin Bach"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [3.071, 1.464, 0.284, 0.098, 0.058, 0.04, 0.031, 0.025, 0.02, 0.017, 0.015, 0.013, 0.011, 0.01, 0.009], "prob_new": [0.4938259720802307, 0.656781017780304, 0.7900040745735168, 0.9113892912864685, 0.9455157518386841, 0.9613584876060486, 0.9702122807502747, 0.9759190678596497, 0.9799162149429321, 0.982880711555481, 0.9851913452148438, 0.9870714545249939, 0.9886695742607117, 0.990090012550354, 0.9913921356201172], "prob_old": [0.8684470057487488, 0.23483070731163025, 0.38715359568595886, 0.28348857164382935, 0.21886631846427917, 0.1926557868719101, 0.17487287521362305, 0.16026684641838074, 0.14959211647510529, 0.14254344999790192, 0.13837279379367828, 0.13637176156044006, 0.1357678771018982, 0.13576139509677887, 0.13571523129940033], "prob_new_token": [0.03165428712964058, 0.4577528238296509, 0.4904312491416931, 0.8127240538597107, 0.9026578664779663, 0.9381294250488281, 0.9531993269920349, 0.9604849219322205, 0.9646697640419006, 0.967689037322998, 0.9703647494316101, 0.9730371236801147, 0.9758215546607971, 0.9787093997001648, 0.9816412925720215], "prob_old_token": [0.7590489983558655, 0.00011246625217609107, 0.00019364974286872894, 6.658044003415853e-05, 3.609525447245687e-05, 1.93459345609881e-05, 1.210407936014235e-05, 8.88927752384916e-06, 7.423607712553348e-06, 6.728869720973307e-06, 6.3277202571043745e-06, 5.986076303088339e-06, 5.60816715733381e-06, 5.176136710360879e-06, 4.704412276623771e-06], "l1-model.layers.2.mlp.down_proj.weight": [94878.140625], "l2-model.layers.2.mlp.down_proj.weight": [15.78499984741211], "linf-model.layers.2.mlp.down_proj.weight": [0.006895559374243021], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "the Cleveland Cavaliers"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [2.965, 1.811, 0.433, 0.049, 0.013, 0.01], "prob_new": [0.62272047996521, 0.587805986404419, 0.7571666240692139, 0.954352617263794, 0.9868159294128418, 0.9902205467224121], "prob_old": [0.8684470057487488, 0.3128097355365753, 0.36594900488853455, 0.4619116187095642, 0.4683266580104828, 0.46054762601852417], "prob_new_token": [0.00015760859241709113, 0.005743792746216059, 0.27314049005508423, 0.8639651536941528, 0.9614583849906921, 0.9720019698143005], "prob_old_token": [0.7590489983558655, 0.00075730518437922, 0.004452557768672705, 0.00047554244520142674, 0.00011447825090726838, 8.333133882842958e-05], "l1-model.layers.2.mlp.down_proj.weight": 
[50904.9921875], "l2-model.layers.2.mlp.down_proj.weight": [8.958663940429688], "linf-model.layers.2.mlp.down_proj.weight": [0.002500903094187379], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "Arsenal"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [3.481, 1.629, 0.297, 0.057, 0.04, 0.032, 0.026, 0.021, 0.017, 0.014, 0.012, 0.01], "prob_new": [0.41277772188186646, 0.48279449343681335, 0.772047221660614, 0.9460722804069519, 0.9613617062568665, 0.9692546725273132, 0.9750442504882812, 0.9794015884399414, 0.9829719662666321, 0.9859567880630493, 0.9883133769035339, 0.9900993704795837], "prob_old": [0.8684470057487488, 0.4371621906757355, 0.42140814661979675, 0.4613693952560425, 0.4399086534976959, 0.42132800817489624, 0.39389801025390625, 0.36233624815940857, 0.33806928992271423, 0.3218578100204468, 0.31073251366615295, 0.3025156557559967], "prob_new_token": [0.03165428712964058, 0.29701703786849976, 0.6172052025794983, 0.908009946346283, 0.9432568550109863, 0.9519253373146057, 0.9575720429420471, 0.9634619951248169, 0.9689822196960449, 0.9736788868904114, 0.9774746298789978, 0.9804477691650391], "prob_old_token": [0.7590489983558655, 0.0001305435289395973, 0.00020195834804326296, 0.000271088007139042, 0.00010401179315522313, 5.994917592033744e-05, 3.87799518648535e-05, 2.56257044384256e-05, 1.762615102052223e-05, 1.2833757864427753e-05, 9.897978088702075e-06, 8.01526039140299e-06], "l1-model.layers.2.mlp.down_proj.weight": [82637.546875], "l2-model.layers.2.mlp.down_proj.weight": [14.10562801361084], "linf-model.layers.2.mlp.down_proj.weight": [0.0053927721455693245], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "the Dallas Mavericks"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [1.431, 0.142, 0.009], "prob_new": [0.6811485886573792, 0.8973116874694824, 0.9908218383789062], "prob_old": [0.8201957941055298, 0.8031644225120544, 0.8004253506660461], "prob_new_token": [0.5299520492553711, 0.997499406337738, 0.9972262382507324], "prob_old_token": [0.5299520492553711, 0.997499406337738, 0.9972262382507324], "l1-model.layers.2.mlp.down_proj.weight": [33428.2890625], "l2-model.layers.2.mlp.down_proj.weight": [5.2431864738464355], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "the Cleveland Cavaliers"}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [2.307, 1.032, 0.137, 0.084, 0.049, 0.03, 0.015, 0.01], "prob_new": [0.5194604396820068, 0.5936868190765381, 0.8891096711158752, 0.9320195913314819, 0.9558727145195007, 0.9720458984375, 0.9855399131774902, 0.990578830242157], "prob_old": [0.8201957941055298, 0.6134989261627197, 0.5229069590568542, 0.5463311076164246, 0.4713842570781708, 0.4758835732936859, 0.4910796284675598, 0.5024181008338928], "prob_new_token": [7.672882929909974e-05, 0.08345974236726761, 0.5687639713287354, 0.954819917678833, 0.7584508061408997, 0.8614088296890259, 0.9470672607421875, 0.9772672653198242], "prob_old_token": [0.5299520492553711, 0.02407311648130417, 0.04797656461596489, 0.0003160372725687921, 0.004307179246097803, 0.00615428201854229, 0.005012451205402613, 0.002825216855853796], "l1-model.layers.2.mlp.down_proj.weight": [61838.97265625], "l2-model.layers.2.mlp.down_proj.weight": [10.817525863647461], 
"linf-model.layers.2.mlp.down_proj.weight": [0.0035114993806928396], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "Chelsea F.C."}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [1.51, 0.133, 0.006], "prob_new": [0.6918376684188843, 0.9000250697135925, 0.9940870404243469], "prob_old": [0.8201957941055298, 0.7936599850654602, 0.7998582720756531], "prob_new_token": [0.5299520492553711, 0.9983604550361633, 0.9993531107902527], "prob_old_token": [0.5299520492553711, 0.9983604550361633, 0.9993531107902527], "l1-model.layers.2.mlp.down_proj.weight": [33354.9296875], "l2-model.layers.2.mlp.down_proj.weight": [5.247282028198242], "linf-model.layers.2.mlp.down_proj.weight": [0.0010006781667470932], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "the Boston Celtics"}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [7.038, 6.036, 1.572, 0.835, 0.253, 0.083, 0.028, 0.024, 0.018, 0.011, 0.007], "prob_new": [0.016076456755399704, 0.020249150693416595, 0.5187405347824097, 0.594057559967041, 0.801729142665863, 0.9233580827713013, 0.9728507995605469, 0.9762783646583557, 0.9824236631393433, 0.9890859127044678, 0.9934108257293701], "prob_old": [0.671699583530426, 0.0004112159658689052, 0.011426283977925777, 0.0008047752198763192, 0.00043683429248631, 9.42311089602299e-05, 9.52638074522838e-05, 9.716644126456231e-05, 8.013468323042616e-05, 5.186377893551253e-05, 3.1079725886229426e-05], "prob_new_token": [2.4008397303987294e-05, 0.0001416463783243671, 0.04337499290704727, 0.1881764829158783, 0.6034832000732422, 0.8467338681221008, 0.9457170963287354, 0.9525765180587769, 0.964865505695343, 0.9781860709190369, 0.9868326187133789], "prob_old_token": [0.671699583530426, 0.0004112159658689052, 0.011426283977925777, 0.0008047752198763192, 0.00043683429248631, 9.42311089602299e-05, 9.52638074522838e-05, 9.716644126456231e-05, 8.013468323042616e-05, 5.186377893551253e-05, 3.1079725886229426e-05], "l1-model.layers.2.mlp.down_proj.weight": [79950.5625], "l2-model.layers.2.mlp.down_proj.weight": [13.546944618225098], "linf-model.layers.2.mlp.down_proj.weight": [0.004705257713794708], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Delft"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [10.085, 6.052, 11.081, 9.711, 1.303, 0.071, 0.036, 0.027, 0.025, 0.025, 0.025, 0.025, 0.023, 0.022, 0.02, 0.018, 0.016, 0.014, 0.013, 0.012], "prob_new": [4.168611849308945e-05, 0.0023533236235380173, 1.5409559637191705e-05, 6.062124884920195e-05, 0.27183815836906433, 0.931506872177124, 0.9645440578460693, 0.9736425876617432, 0.9751379489898682, 0.9751641750335693, 0.9751281142234802, 0.9757822751998901, 0.9770724177360535, 0.9787209033966064, 0.9805108904838562, 0.9823095202445984, 0.9840393662452698, 0.9856578707695007, 0.9871444702148438, 0.9884909391403198], "prob_old": [0.671699583530426, 0.058335307985544205, 2.4869032131391577e-06, 3.9634902350371704e-05, 0.000808796554338187, 0.00011897963850060478, 7.502520747948438e-05, 6.605474482057616e-05, 7.363929762504995e-05, 7.997801731107756e-05, 8.088319736998528e-05, 7.796813588356599e-05, 7.331948290811852e-05, 6.798808317398652e-05, 6.243758980417624e-05, 5.689064346370287e-05, 5.148471245774999e-05, 4.63160322397016e-05, 4.145431375945918e-05, 3.6947214539395645e-05], "prob_new_token": [4.168611849308945e-05, 
0.0023533236235380173, 1.5409559637191705e-05, 6.062124884920195e-05, 0.27183815836906433, 0.931506872177124, 0.9645440578460693, 0.9736425876617432, 0.9751379489898682, 0.9751641750335693, 0.9751281142234802, 0.9757822751998901, 0.9770724177360535, 0.9787209033966064, 0.9805108904838562, 0.9823095202445984, 0.9840393662452698, 0.9856578707695007, 0.9871444702148438, 0.9884909391403198], "prob_old_token": [0.671699583530426, 0.058335307985544205, 2.4869032131391577e-06, 3.9634902350371704e-05, 0.000808796554338187, 0.00011897963850060478, 7.502520747948438e-05, 6.605474482057616e-05, 7.363929762504995e-05, 7.997801731107756e-05, 8.088319736998528e-05, 7.796813588356599e-05, 7.331948290811852e-05, 6.798808317398652e-05, 6.243758980417624e-05, 5.689064346370287e-05, 5.148471245774999e-05, 4.63160322397016e-05, 4.145431375945918e-05, 3.6947214539395645e-05], "l1-model.layers.2.mlp.down_proj.weight": [97336.40625], "l2-model.layers.2.mlp.down_proj.weight": [17.239789962768555], "linf-model.layers.2.mlp.down_proj.weight": [0.007195485755801201], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Rome"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [5.108, 4.879, 2.375, 0.952, 0.454, 0.188, 0.088, 0.052, 0.031, 0.017, 0.009], "prob_new": [0.09006981551647186, 0.1501590758562088, 0.34547877311706543, 0.4686306416988373, 0.6390770673751831, 0.8298177123069763, 0.9182049632072449, 0.9505990743637085, 0.9701716303825378, 0.9829058647155762, 0.9906456470489502], "prob_old": [0.671699583530426, 0.00012743804836645722, 0.025061355903744698, 0.00152928929310292, 8.485124271828681e-05, 1.2374822290439624e-05, 2.1260129869915545e-06, 5.596762093773577e-07, 1.9001916484739922e-07, 7.913489952215969e-08, 3.8042340833044364e-08], "prob_new_token": [1.2657715160457883e-05, 8.742135833017528e-06, 0.0032660607248544693, 0.1454075127840042, 0.5576022863388062, 0.8388632535934448, 0.9558427929878235, 0.9838823676109314, 0.993128776550293, 0.9967390894889832, 0.9983046650886536], "prob_old_token": [0.671699583530426, 0.00012743804836645722, 0.025061355903744698, 0.00152928929310292, 8.485124271828681e-05, 1.2374822290439624e-05, 2.1260129869915545e-06, 5.596762093773577e-07, 1.9001916484739922e-07, 7.913489952215969e-08, 3.8042340833044364e-08], "l1-model.layers.2.mlp.down_proj.weight": [80702.078125], "l2-model.layers.2.mlp.down_proj.weight": [13.655571937561035], "linf-model.layers.2.mlp.down_proj.weight": [0.004853582940995693], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Manchester, England"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [6.17, 6.352, 2.572, 2.115, 0.453, 0.055, 0.029, 0.026, 0.015, 0.009], "prob_new": [0.17178383469581604, 0.00526715163141489, 0.2587014436721802, 0.5047460198402405, 0.7018077373504639, 0.9476618766784668, 0.9712227582931519, 0.9747164249420166, 0.984846830368042, 0.9914959669113159], "prob_old": [0.4325380325317383, 0.08786904066801071, 0.051158323884010315, 0.04856235906481743, 0.059571728110313416, 0.06950284540653229, 0.08657430857419968, 0.14972613751888275, 0.17495819926261902, 0.18443888425827026], "prob_new_token": [1.272373538085958e-05, 0.0002969212655443698, 0.011544727720320225, 0.014638599008321762, 0.40453261137008667, 0.8965805768966675, 0.949000358581543, 0.9510223269462585, 0.9699853658676147, 0.9831362366676331], "prob_old_token": [0.6283074617385864, 0.0007462407229468226, 0.001500744023360312, 
0.00020155234960839152, 2.5365565306856297e-05, 1.89735305866634e-06, 5.911510925216135e-07, 4.236341624164197e-07, 1.88021658686921e-07, 7.239091104338513e-08], "l1-model.layers.2.mlp.down_proj.weight": [71591.25], "l2-model.layers.2.mlp.down_proj.weight": [12.39692497253418], "linf-model.layers.2.mlp.down_proj.weight": [0.004372837021946907], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Delft"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}, {"loss_per_step": [6.128, 4.437, 1.435, 0.181, 0.05, 0.025, 0.02, 0.015, 0.012, 0.01, 0.009], "prob_new": [0.0021801020484417677, 0.011832120828330517, 0.23823019862174988, 0.8344958424568176, 0.9513704776763916, 0.9752470850944519, 0.9804027080535889, 0.9846731424331665, 0.9879732131958008, 0.9899293780326843, 0.990764856338501], "prob_old": [0.4325380325317383, 0.049755677580833435, 0.1523018777370453, 0.058542605489492416, 0.10105854272842407, 0.1428147256374359, 0.11785365641117096, 0.0744565799832344, 0.05486133694648743, 0.04973079264163971, 0.05571851134300232], "prob_new_token": [0.0021801020484417677, 0.011832120828330517, 0.23823019862174988, 0.8344958424568176, 0.9513704776763916, 0.9752470850944519, 0.9804027080535889, 0.9846731424331665, 0.9879732131958008, 0.9899293780326843, 0.990764856338501], "prob_old_token": [0.6283074617385864, 0.0010912757134065032, 2.788866186165251e-05, 9.865579158940818e-06, 1.3018870959058404e-06, 3.9166269516499597e-07, 2.214279106738104e-07, 1.4630956002292805e-07, 1.0647810455566287e-07, 8.962047104432713e-08, 8.60378648326332e-08], "l1-model.layers.2.mlp.down_proj.weight": [79288.21875], "l2-model.layers.2.mlp.down_proj.weight": [13.483081817626953], "linf-model.layers.2.mlp.down_proj.weight": [0.004794212058186531], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Berlin"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}, {"loss_per_step": [11.597, 6.566, 8.775, 1.776, 0.164, 0.053, 0.029, 0.022, 0.019, 0.018, 0.018, 0.017, 0.016, 0.015, 0.014, 0.012, 0.011, 0.01, 0.009], "prob_new": [9.194967788062058e-06, 0.0014080522814765573, 0.0001546226703794673, 0.16939860582351685, 0.8486785888671875, 0.9488494992256165, 0.9709951281547546, 0.978343665599823, 0.9809285402297974, 0.9818481802940369, 0.9824123978614807, 0.9831357598304749, 0.9841007590293884, 0.9852307438850403, 0.986427903175354, 0.9876183271408081, 0.9887537956237793, 0.9898081421852112, 0.9907695055007935], "prob_old": [0.4325380325317383, 0.027114205062389374, 0.17111483216285706, 0.06974741071462631, 0.054499827325344086, 0.0512828603386879, 0.05120338127017021, 0.052734509110450745, 0.054919712245464325, 0.05727414786815643, 0.05963956564664841, 0.06202048063278198, 0.06445693224668503, 0.06696827709674835, 0.06954511255025864, 0.0721537321805954, 0.0747523233294487, 0.07729484140872955, 0.07973688840866089], "prob_new_token": [9.194967788062058e-06, 0.0014080522814765573, 0.0001546226703794673, 0.16939860582351685, 0.8486785888671875, 0.9488494992256165, 0.9709951281547546, 0.978343665599823, 0.9809285402297974, 0.9818481802940369, 0.9824123978614807, 0.9831357598304749, 0.9841007590293884, 0.9852307438850403, 0.986427903175354, 0.9876183271408081, 0.9887537956237793, 0.9898081421852112, 0.9907695055007935], "prob_old_token": [0.6283074617385864, 0.002082953928038478, 4.89584963361267e-05, 0.0009376890957355499, 2.016308826569002e-05, 1.5028205098133185e-06, 3.8556993331440026e-07, 
1.747456082057397e-07, 1.0903455915922677e-07, 8.137421758647179e-08, 6.629837656646487e-08, 5.618755949399201e-08, 4.8614595016260864e-08, 4.265101338774002e-08, 3.781417134973708e-08, 3.380020885401791e-08, 3.040117846353496e-08, 2.7476126263081824e-08, 2.4925135733155912e-08], "l1-model.layers.2.mlp.down_proj.weight": [99226.8125], "l2-model.layers.2.mlp.down_proj.weight": [17.05434799194336], "linf-model.layers.2.mlp.down_proj.weight": [0.006918088998645544], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Rome"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}]