diff --git a/experiments/experimental_setup.yaml b/experiments/experimental_setup.yaml index a629f14..1b3220d 100644 --- a/experiments/experimental_setup.yaml +++ b/experiments/experimental_setup.yaml @@ -146,3 +146,7 @@ recourse_methods: hyperparams: roar: hyperparams: + rbr: + hyperparams: + train_data: None + device: "cpu" diff --git a/experiments/results.csv b/experiments/results.csv index b3d40cf..8325219 100644 --- a/experiments/results.csv +++ b/experiments/results.csv @@ -300,11 +300,11 @@ cem,breast_cancer,linear,2.0,0.6546249690388143,0.2583320569093275,0.47574626865 cem,breast_cancer,linear,4.0,0.5133801127957832,0.1163994206025501,0.2926439232409382,0.0,29.0,,, cem,breast_cancer,linear,5.0,0.7754811535106435,0.1959859661802624,0.3528784648187634,0.0,28.0,,, cem,boston_housing,linear,2.0,0.3157050067832662,0.053102248682109,0.1982718258659161,0.0,11.0,0.61,1.0,0.8797734850000001 -cem,boston_housing,linear,2.0,1.9607141688810772,1.9229715927043896,1.0,0.0,10.0,,, +cem,boston_housing,linear,2.0,1.9607141688810767,1.9229715927043896,1.0,0.0,10.0,,, cem,boston_housing,linear,2.0,1.2126467580879916,1.0452186199428948,1.0,0.0,10.0,,, cem,boston_housing,linear,3.0,1.5707996476855637,1.1600278265616053,0.9838620202733372,0.0,10.0,,, cem,boston_housing,linear,2.0,1.0530235954178018,1.002811494796327,1.0,0.0,10.0,,, -cem,boston_housing,linear,2.0,1.9859802168674812,1.972156816976588,1.0,0.0,11.0,,, +cem,boston_housing,linear,2.0,1.9859802168674807,1.972156816976588,1.0,0.0,11.0,,, cem,boston_housing,linear,2.0,1.9835847014252148,1.967438761250337,1.0,0.0,10.0,,, cem,boston_housing,linear,1.0,0.99833583352388,0.9966743113400712,0.998335770840688,0.0,10.0,,, cem,boston_housing,linear,3.0,1.7379017256685816,1.2320290298133834,0.9515356296333654,0.0,9.0,,, @@ -441,11 +441,11 @@ cem_vae,breast_cancer,linear,2.0,0.5801988091977869,0.1784848932097402,0.3614072 cem_vae,breast_cancer,linear,3.0,0.3352612097124629,0.0390095161458342,0.1403296078802803,0.0,30.0,,, 
cem_vae,breast_cancer,linear,2.0,0.4004687201237162,0.0899190798048404,0.2699893390191897,0.0,30.0,,, cem_vae,boston_housing,linear,2.0,0.3205123598029805,0.0550316846467667,0.2030791788856304,0.0,11.0,0.61,1.0,0.9089681000000004 -cem_vae,boston_housing,linear,2.0,1.9607141688810772,1.9229715927043896,1.0,0.0,10.0,,, +cem_vae,boston_housing,linear,2.0,1.9607141688810767,1.9229715927043896,1.0,0.0,10.0,,, cem_vae,boston_housing,linear,2.0,1.2190534229363192,1.0479843776011577,1.0,0.0,10.0,,, cem_vae,boston_housing,linear,3.0,1.5672782052722336,1.1586740385409264,0.9838620202733372,0.0,10.0,,, cem_vae,boston_housing,linear,2.0,1.055502910221879,1.0030805658469415,1.0,0.0,10.0,,, -cem_vae,boston_housing,linear,2.0,1.9859802168674812,1.972156816976588,1.0,0.0,11.0,,, +cem_vae,boston_housing,linear,2.0,1.9859802168674807,1.972156816976588,1.0,0.0,11.0,,, cem_vae,boston_housing,linear,2.0,1.9835847014252148,1.967438761250337,1.0,0.0,10.0,,, cem_vae,boston_housing,linear,1.0,0.99833583352388,0.9966743113400712,0.998335770840688,0.0,10.0,,, cem_vae,boston_housing,linear,3.0,1.7244007967897974,1.2241148750426156,0.9515356296333654,0.0,9.0,,, @@ -790,124 +790,124 @@ gravitational,german,linear,4.0,1.1191577919232991,0.4732576312413504,0.59678473 gravitational,mortgage,linear,,,,,,,,0.0,0.00565048 gravitational,twomoon,linear,,,,,,,,0.0,0.0084727499999999 gravitational,breast_cancer,linear,,,,,,,,0.0,0.00586106 -greedy,adult,linear,0.0,4.0396296752120975e-08,8.962229397581509e-16,2.7815500902583782e-08,0.0,0.0,0.13,1.0,76.286314595 +greedy,adult,linear,0.0,4.0396296752120975e-08,8.962229397581509e-16,2.7815500902583785e-08,0.0,0.0,0.13,1.0,76.286314595 greedy,adult,linear,0.0,1.804634330859756e-08,1.592512450567325e-16,9.798023781204536e-09,0.0,0.0,,, greedy,adult,linear,0.0,5.778703920933381e-08,1.2039877125491225e-15,2.9665488954222493e-08,0.0,0.0,,, -greedy,adult,linear,0.0,2.98668945686309e-08,3.0022778071744173e-16,1.1920928910669204e-08,0.0,0.0,,, 
-greedy,adult,linear,0.0,3.896741829212402e-08,8.935979050608659e-16,2.7815500902583782e-08,0.0,0.0,,, -greedy,adult,linear,0.0,3.7647406747609586e-08,6.964551116624548e-16,2.3841857932360707e-08,0.0,0.0,,, +greedy,adult,linear,0.0,2.98668945686309e-08,3.002277807174417e-16,1.1920928910669204e-08,0.0,0.0,,, +greedy,adult,linear,0.0,3.896741829212402e-08,8.935979050608659e-16,2.7815500902583785e-08,0.0,0.0,,, +greedy,adult,linear,0.0,3.764740674760959e-08,6.964551116624548e-16,2.3841857932360707e-08,0.0,0.0,,, greedy,adult,linear,0.0,4.1721585020138014e-08,7.490693713437114e-16,2.3841857932360707e-08,0.0,0.0,,, -greedy,adult,linear,0.0,4.080038190634738e-08,9.092849789066235e-16,2.7815500902583782e-08,0.0,0.0,,, +greedy,adult,linear,0.0,4.080038190634738e-08,9.092849789066235e-16,2.7815500902583785e-08,0.0,0.0,,, greedy,adult,linear,0.0,2.9642354626435715e-08,3.2207406836013314e-16,1.2247529768139032e-08,0.0,0.0,,, -greedy,adult,linear,0.0,3.0958616947796e-08,7.829605124276932e-16,2.7815500902583782e-08,0.0,0.0,,, -greedy,adult,linear,0.0,4.350400334418581e-08,9.224965772207422e-16,2.7815500902583782e-08,0.0,0.0,,, +greedy,adult,linear,0.0,3.0958616947796e-08,7.829605124276932e-16,2.7815500902583785e-08,0.0,0.0,,, +greedy,adult,linear,0.0,4.350400334418581e-08,9.224965772207422e-16,2.7815500902583785e-08,0.0,0.0,,, greedy,adult,linear,0.0,1.5350821634674272e-08,1.506026600852334e-16,1.1920928910669204e-08,0.0,0.0,,, -greedy,adult,linear,0.0,5.019432058883667e-08,1.024224634202016e-15,2.7815500902583782e-08,0.0,0.0,,, -greedy,adult,linear,0.0,3.1224257845963166e-08,5.147667820331895e-16,1.986821496213764e-08,0.0,0.0,,, -greedy,adult,linear,0.0,2.6991778201423955e-08,4.206608583635433e-16,1.986821496213764e-08,0.0,0.0,,, -greedy,adult,linear,0.0,4.978606954475495e-08,1.015057845055806e-15,2.7815500902583782e-08,0.0,0.0,,, +greedy,adult,linear,0.0,5.0194320588836674e-08,1.024224634202016e-15,2.7815500902583785e-08,0.0,0.0,,, 
+greedy,adult,linear,0.0,3.1224257845963166e-08,5.1476678203318995e-16,1.9868214962137642e-08,0.0,0.0,,, +greedy,adult,linear,0.0,2.6991778201423955e-08,4.206608583635433e-16,1.9868214962137642e-08,0.0,0.0,,, +greedy,adult,linear,0.0,4.978606954475495e-08,1.015057845055806e-15,2.7815500902583785e-08,0.0,0.0,,, greedy,adult,linear,0.0,3.540202631047151e-08,6.886633420054365e-16,2.3841857932360707e-08,0.0,0.0,,, -greedy,adult,linear,0.0,5.106053374559495e-08,1.0249749594349571e-15,2.7815500902583782e-08,0.0,0.0,,, -greedy,adult,linear,0.0,4.2845802739055465e-08,9.10223125263583e-16,2.7815500902583782e-08,0.0,0.0,,, +greedy,adult,linear,0.0,5.106053374559495e-08,1.0249749594349571e-15,2.7815500902583785e-08,0.0,0.0,,, +greedy,adult,linear,0.0,4.2845802739055465e-08,9.10223125263583e-16,2.7815500902583785e-08,0.0,0.0,,, greedy,adult,linear,0.0,3.6422653781897913e-08,6.909550389633829e-16,2.3841857932360707e-08,0.0,0.0,,, greedy,compass,linear,0.0,1.960679119061837e-10,3.8442626079251005e-20,1.960679119061837e-10,0.0,0.0,0.0,1.0,521.774312715 greedy,compass,linear,0.0,1.960679119061837e-10,3.8442626079251005e-20,1.960679119061837e-10,0.0,0.0,,, greedy,compass,linear,0.0,0.0,0.0,0.0,0.0,0.0,,, greedy,compass,linear,0.0,3.921358238123672e-10,1.5377050431700402e-19,3.921358238123672e-10,0.0,0.0,,, -greedy,compass,linear,0.0,7.842716476247348e-10,6.150820172680161e-19,7.842716476247348e-10,0.0,0.0,,, +greedy,compass,linear,0.0,7.842716476247348e-10,6.1508201726801685e-19,7.842716476247348e-10,0.0,0.0,,, greedy,compass,linear,0.0,0.0,0.0,0.0,0.0,0.0,,, greedy,compass,linear,0.0,1.960679119061837e-10,3.8442626079251005e-20,1.960679119061837e-10,0.0,0.0,,, greedy,compass,linear,0.0,0.0,0.0,0.0,0.0,0.0,,, greedy,compass,linear,0.0,0.0,0.0,0.0,0.0,0.0,,, -greedy,compass,linear,0.0,4.705629857992832e-09,2.2142952360433646e-17,4.705629857992832e-09,0.0,0.0,,, -greedy,compass,linear,0.0,1.5685432952494691e-09,2.4603280690720643e-18,1.5685432952494691e-09,0.0,0.0,,, 
+greedy,compass,linear,0.0,4.705629857992832e-09,2.214295236043365e-17,4.705629857992832e-09,0.0,0.0,,, +greedy,compass,linear,0.0,1.5685432952494691e-09,2.460328069072065e-18,1.5685432952494691e-09,0.0,0.0,,, greedy,compass,linear,0.0,1.960679119061837e-10,3.8442626079251005e-20,1.960679119061837e-10,0.0,0.0,,, greedy,compass,linear,0.0,0.0,0.0,0.0,0.0,0.0,,, greedy,compass,linear,0.0,0.0,0.0,0.0,0.0,0.0,,, greedy,compass,linear,0.0,0.0,0.0,0.0,0.0,0.0,,, -greedy,compass,linear,0.0,4.705629857992832e-09,2.2142952360433646e-17,4.705629857992832e-09,0.0,0.0,,, -greedy,compass,linear,0.0,7.842716476247348e-10,6.150820172680161e-19,7.842716476247348e-10,0.0,0.0,,, -greedy,compass,linear,0.0,1.5685432952494691e-09,2.4603280690720643e-18,1.5685432952494691e-09,0.0,0.0,,, +greedy,compass,linear,0.0,4.705629857992832e-09,2.214295236043365e-17,4.705629857992832e-09,0.0,0.0,,, +greedy,compass,linear,0.0,7.842716476247348e-10,6.1508201726801685e-19,7.842716476247348e-10,0.0,0.0,,, +greedy,compass,linear,0.0,1.5685432952494691e-09,2.460328069072065e-18,1.5685432952494691e-09,0.0,0.0,,, greedy,compass,linear,0.0,9.411259715985665e-09,8.857180944173456e-17,9.411259715985665e-09,0.0,0.0,,, greedy,compass,linear,0.0,6.274173125486726e-09,3.936524840857987e-17,6.274173125486726e-09,0.0,0.0,,, -greedy,credit,linear,0.0,1.0418362966389469e-08,9.879969299127451e-17,9.93410748106882e-09,0.0,0.0,0.0,1.0,1221.0935097949996 -greedy,credit,linear,0.0,1.0322418517172416e-08,4.9457074139788176e-17,4.967053740534411e-09,0.0,0.0,,, -greedy,credit,linear,0.0,2.9597948975404464e-08,1.9481173434251016e-16,9.93410748106882e-09,0.0,0.0,,, -greedy,credit,linear,0.0,1.5871551891548718e-08,1.237255367104052e-16,9.93410748106882e-09,0.0,0.0,,, -greedy,credit,linear,0.0,1.7090685002635334e-08,1.1553958627835602e-16,9.93410748106882e-09,0.0,0.0,,, -greedy,credit,linear,0.0,2.2044745160614346e-08,3.9758930881682884e-16,1.986821496213764e-08,0.0,0.0,,, 
-greedy,credit,linear,0.0,3.1486442115917495e-08,4.523589689603062e-16,1.986821496213764e-08,0.0,0.0,,, -greedy,credit,linear,0.0,1.5839885710369758e-08,1.2404825288387974e-16,9.93410748106882e-09,0.0,0.0,,, -greedy,credit,linear,0.0,1.9448150700984712e-08,1.3028006347610814e-16,9.93410748106882e-09,0.0,0.0,,, -greedy,credit,linear,0.0,2.9951102376024614e-08,4.430761904611592e-16,1.986821496213764e-08,0.0,0.0,,, -greedy,credit,linear,0.0,3.1101601943167585e-08,4.94245914764366e-16,1.986821496213764e-08,0.0,0.0,,, -greedy,credit,linear,0.0,5.003567987564052e-09,2.4672289571016706e-17,4.967053740534411e-09,0.0,0.0,,, -greedy,credit,linear,0.0,2.1277441571675163e-08,3.9593946112599883e-16,1.986821496213764e-08,0.0,0.0,,, -greedy,credit,linear,0.0,1.0768829977053642e-08,9.893864661098084e-17,9.93410748106882e-09,0.0,0.0,,, -greedy,credit,linear,0.0,1.0017679815451512e-08,9.86895013828868e-17,9.93410748106882e-09,0.0,0.0,,, -greedy,credit,linear,0.0,4.156805315626636e-08,7.922446074119609e-16,1.986821496213764e-08,0.0,0.0,,, -greedy,credit,linear,0.0,3.1542552708652145e-08,4.94607977713687e-16,1.986821496213764e-08,0.0,0.0,,, -greedy,credit,linear,0.0,2.481466944599665e-08,2.1133414023820086e-16,9.93410748106882e-09,0.0,0.0,,, -greedy,credit,linear,0.0,1.4071315955098073e-08,1.0639896765865544e-16,9.93410748106882e-09,0.0,0.0,,, -greedy,credit,linear,0.0,1.2967881825656781e-08,1.0549411960187271e-16,9.93410748106882e-09,0.0,0.0,,, +greedy,credit,linear,0.0,1.0418362966389469e-08,9.879969299127451e-17,9.934107481068821e-09,0.0,0.0,0.0,1.0,1221.0935097949996 +greedy,credit,linear,0.0,1.0322418517172416e-08,4.9457074139788176e-17,4.9670537405344114e-09,0.0,0.0,,, +greedy,credit,linear,0.0,2.9597948975404464e-08,1.9481173434251016e-16,9.934107481068821e-09,0.0,0.0,,, +greedy,credit,linear,0.0,1.5871551891548718e-08,1.2372553671040521e-16,9.934107481068821e-09,0.0,0.0,,, +greedy,credit,linear,0.0,1.7090685002635334e-08,1.1553958627835602e-16,9.934107481068821e-09,0.0,0.0,,, 
+greedy,credit,linear,0.0,2.2044745160614346e-08,3.975893088168288e-16,1.9868214962137642e-08,0.0,0.0,,, +greedy,credit,linear,0.0,3.1486442115917495e-08,4.523589689603062e-16,1.9868214962137642e-08,0.0,0.0,,, +greedy,credit,linear,0.0,1.5839885710369758e-08,1.2404825288387974e-16,9.934107481068821e-09,0.0,0.0,,, +greedy,credit,linear,0.0,1.9448150700984712e-08,1.3028006347610814e-16,9.934107481068821e-09,0.0,0.0,,, +greedy,credit,linear,0.0,2.9951102376024614e-08,4.430761904611592e-16,1.9868214962137642e-08,0.0,0.0,,, +greedy,credit,linear,0.0,3.1101601943167585e-08,4.94245914764366e-16,1.9868214962137642e-08,0.0,0.0,,, +greedy,credit,linear,0.0,5.003567987564052e-09,2.4672289571016706e-17,4.9670537405344114e-09,0.0,0.0,,, +greedy,credit,linear,0.0,2.1277441571675163e-08,3.959394611259988e-16,1.9868214962137642e-08,0.0,0.0,,, +greedy,credit,linear,0.0,1.0768829977053642e-08,9.893864661098084e-17,9.934107481068821e-09,0.0,0.0,,, +greedy,credit,linear,0.0,1.0017679815451512e-08,9.86895013828868e-17,9.934107481068821e-09,0.0,0.0,,, +greedy,credit,linear,0.0,4.156805315626636e-08,7.922446074119609e-16,1.9868214962137642e-08,0.0,0.0,,, +greedy,credit,linear,0.0,3.1542552708652145e-08,4.94607977713687e-16,1.9868214962137642e-08,0.0,0.0,,, +greedy,credit,linear,0.0,2.4814669445996653e-08,2.1133414023820086e-16,9.934107481068821e-09,0.0,0.0,,, +greedy,credit,linear,0.0,1.4071315955098073e-08,1.0639896765865544e-16,9.934107481068821e-09,0.0,0.0,,, +greedy,credit,linear,0.0,1.296788182565678e-08,1.0549411960187271e-16,9.934107481068821e-09,0.0,0.0,,, greedy,german,linear,0.0,8.372545629020678e-09,2.450878759627053e-17,3.614191626533092e-09,0.0,0.0,0.0,1.0,763.92596414 greedy,german,linear,0.0,1.654176573495647e-08,2.0302604165676488e-16,1.4024622296826552e-08,0.0,0.0,,, greedy,german,linear,0.0,2.901671847244103e-08,2.8280883178507037e-16,1.0518466764253276e-08,0.0,0.0,,, -greedy,german,linear,0.0,9.749357602478751e-09,7.333027878451805e-17,8.514949301208219e-09,0.0,0.0,,, 
+greedy,german,linear,0.0,9.749357602478753e-09,7.333027878451805e-17,8.514949301208219e-09,0.0,0.0,,, greedy,german,linear,0.0,1.4139248702416118e-08,1.6418910734843891e-16,1.2772423896301175e-08,0.0,0.0,,, greedy,german,linear,0.0,1.4164297512664346e-09,1.1488776873852222e-18,9.781603027558814e-10,0.0,0.0,,, greedy,german,linear,0.0,8.252541539022218e-09,3.0968978499671866e-17,4.257474622848534e-09,0.0,0.0,,, greedy,german,linear,0.0,1.3165163861850182e-08,1.6328905686734475e-16,1.2772423896301175e-08,0.0,0.0,,, -greedy,german,linear,0.0,1.16082941592488e-08,6.133649769549174e-17,7.0123111761688506e-09,0.0,0.0,,, -greedy,german,linear,0.0,2.14829301223296e-09,1.6606081896272709e-18,9.117470702035835e-10,0.0,0.0,,, -greedy,german,linear,0.0,6.915140723928737e-09,2.117112042547663e-17,3.2837653013428883e-09,0.0,0.0,,, +greedy,german,linear,0.0,1.16082941592488e-08,6.133649769549174e-17,7.01231117616885e-09,0.0,0.0,,, +greedy,german,linear,0.0,2.1482930122329602e-09,1.6606081896272709e-18,9.117470702035835e-10,0.0,0.0,,, +greedy,german,linear,0.0,6.915140723928737e-09,2.1171120425476627e-17,3.2837653013428883e-09,0.0,0.0,,, greedy,german,linear,0.0,1.5773883005332223e-08,1.4902608295704406e-16,1.1892067885987956e-08,0.0,0.0,,, greedy,german,linear,0.0,1.6354618076475088e-08,1.2819906628595526e-16,1.0344065659584345e-08,0.0,0.0,,, greedy,german,linear,0.0,4.088454713668812e-09,1.099757512716703e-17,3.193105974075295e-09,0.0,0.0,,, -greedy,german,linear,0.0,3.6405813953754854e-09,1.0322112624972704e-17,3.193105974075295e-09,0.0,0.0,,, +greedy,german,linear,0.0,3.6405813953754845e-09,1.0322112624972704e-17,3.193105974075295e-09,0.0,0.0,,, greedy,german,linear,0.0,1.7668551427485468e-08,1.0472104587260362e-16,6.386211948150589e-09,0.0,0.0,,, greedy,german,linear,0.0,1.0176535752970396e-08,4.622099853329013e-17,5.259233354371062e-09,0.0,0.0,,, greedy,german,linear,0.0,7.575586866148676e-09,1.9383207471951088e-17,2.817232849783302e-09,0.0,0.0,,, 
greedy,german,linear,0.0,1.4638677714451376e-08,7.331620660924491e-17,5.99850860649731e-09,0.0,0.0,,, greedy,german,linear,0.0,3.4580299290709604e-08,4.945656094803984e-16,1.805128013732116e-08,0.0,0.0,,, -greedy,mortgage,linear,0.0,1.0332570993254464e-08,6.039633491888591e-17,7.0391607265296585e-09,0.0,0.0,0.1894736842105263,1.0,1647.0909649684215 -greedy,mortgage,linear,0.0,1.3032841264526416e-09,1.3643043583068053e-18,1.1591012247236243e-09,0.0,0.0,,, -greedy,mortgage,linear,0.0,5.409528158750732e-09,2.598004243255979e-17,5.0868372292178784e-09,0.0,0.0,,, +greedy,mortgage,linear,0.0,1.0332570993254464e-08,6.039633491888591e-17,7.039160726529658e-09,0.0,0.0,0.1894736842105263,1.0,1647.0909649684215 +greedy,mortgage,linear,0.0,1.3032841264526412e-09,1.3643043583068053e-18,1.1591012247236243e-09,0.0,0.0,,, +greedy,mortgage,linear,0.0,5.409528158750732e-09,2.598004243255979e-17,5.086837229217878e-09,0.0,0.0,,, greedy,mortgage,linear,0.0,5.36908054474261e-10,2.714354081337102e-19,5.207438195931059e-10,0.0,0.0,,, greedy,mortgage,linear,0.0,3.1514934828447143e-09,7.060421725762903e-18,2.599090803689918e-09,0.0,0.0,,, greedy,mortgage,linear,0.0,1.51235670653449e-08,1.1520071268937018e-16,8.209692592409109e-09,0.0,0.0,,, greedy,mortgage,linear,0.0,1.1619594952883009e-08,6.754349546988761e-17,5.943965347698921e-09,0.0,0.0,,, -greedy,mortgage,linear,0.0,8.197906242735087e-09,4.059305162034081e-17,5.968474214856912e-09,0.0,0.0,,, +greedy,mortgage,linear,0.0,8.197906242735087e-09,4.0593051620340806e-17,5.968474214856912e-09,0.0,0.0,,, greedy,mortgage,linear,0.0,4.197500347302707e-09,9.9305433087987e-18,2.847428584584755e-09,0.0,0.0,,, -greedy,mortgage,linear,0.0,9.97520441248234e-09,5.006761978755895e-17,5.3846338499141674e-09,0.0,0.0,,, +greedy,mortgage,linear,0.0,9.975204412482341e-09,5.006761978755895e-17,5.3846338499141674e-09,0.0,0.0,,, greedy,mortgage,linear,0.0,4.628075062074188e-09,1.4056027608258132e-17,3.607576903874588e-09,0.0,0.0,,, 
-greedy,mortgage,linear,0.0,9.532791278576981e-09,4.559794157955439e-17,5.050021095742707e-09,0.0,0.0,,, +greedy,mortgage,linear,0.0,9.53279127857698e-09,4.559794157955439e-17,5.050021095742707e-09,0.0,0.0,,, greedy,mortgage,linear,0.0,6.535677971042375e-09,3.22008863274766e-17,5.596287294684643e-09,0.0,0.0,,, -greedy,mortgage,linear,0.0,4.751878168729462e-09,1.4628894462318048e-17,3.6679765058611492e-09,0.0,0.0,,, +greedy,mortgage,linear,0.0,4.751878168729462e-09,1.4628894462318048e-17,3.6679765058611484e-09,0.0,0.0,,, greedy,mortgage,linear,0.0,1.2101761898142271e-08,7.363218473134251e-17,6.501360810817758e-09,0.0,0.0,,, greedy,mortgage,linear,0.0,1.891971426748462e-09,2.0973033737071722e-18,1.338111607629422e-09,0.0,0.0,,, greedy,mortgage,linear,0.0,4.1166315078999816e-09,1.2276025036408094e-17,3.437209794565632e-09,0.0,0.0,,, -greedy,mortgage,linear,0.0,1.9433264664137084e-08,1.9232399362733722e-16,1.1039149983016472e-08,0.0,0.0,,, +greedy,mortgage,linear,0.0,1.9433264664137084e-08,1.923239936273372e-16,1.1039149983016472e-08,0.0,0.0,,, greedy,mortgage,linear,0.0,2.1682280623713797e-09,3.840892450116785e-18,1.9473306855921635e-09,0.0,0.0,,, greedy,twomoon,linear,0.0,1.8665938628092963e-08,1.7591825897088803e-16,1.0257530436152962e-08,0.0,0.0,0.0,1.0,2016.23175166 greedy,twomoon,linear,0.0,1.8632091924875027e-08,2.137083624872153e-16,1.3795495579227436e-08,0.0,0.0,,, greedy,twomoon,linear,0.0,1.3797453513042512e-08,1.4375213652585268e-16,1.1826569834560985e-08,0.0,0.0,,, greedy,twomoon,linear,0.0,3.767193501591493e-09,7.308982136233745e-18,2.210023386517434e-09,0.0,0.0,,, greedy,twomoon,linear,0.0,3.5754390226294694e-08,7.772752307703987e-16,2.6186437573905152e-08,0.0,0.0,,, -greedy,twomoon,linear,0.0,2.6068826278202728e-08,5.698126570661714e-16,2.3758703449061613e-08,0.0,0.0,,, +greedy,twomoon,linear,0.0,2.6068826278202728e-08,5.69812657066172e-16,2.3758703449061613e-08,0.0,0.0,,, 
greedy,twomoon,linear,0.0,1.6624034954171307e-08,1.4464775997720767e-16,1.0110418280362412e-08,0.0,0.0,,, greedy,twomoon,linear,0.0,3.810764737099959e-08,8.018428030433007e-16,2.520794462057552e-08,0.0,0.0,,, greedy,twomoon,linear,0.0,2.0360846092470908e-08,3.2369424982559667e-16,1.7809716035266376e-08,0.0,0.0,,, -greedy,twomoon,linear,0.0,3.3704222579533656e-08,7.973010507410352e-16,2.7559906135010692e-08,0.0,0.0,,, +greedy,twomoon,linear,0.0,3.3704222579533656e-08,7.973010507410352e-16,2.755990613501069e-08,0.0,0.0,,, greedy,twomoon,linear,0.0,4.011398219150309e-08,8.634995116366633e-16,2.5485330068519832e-08,0.0,0.0,,, greedy,twomoon,linear,0.0,2.618634667439501e-08,5.775182835630938e-16,2.3924976222389205e-08,0.0,0.0,,, greedy,twomoon,linear,0.0,1.0563183633349738e-08,7.924905768885032e-17,8.706400822511284e-09,0.0,0.0,,, greedy,twomoon,linear,0.0,2.1736180633169507e-08,2.5653272629698185e-16,1.4054149954922936e-08,0.0,0.0,,, greedy,twomoon,linear,0.0,1.4143115040221586e-08,1.0958002271729999e-16,9.258584177107563e-09,0.0,0.0,,, -greedy,twomoon,linear,0.0,7.013897851404494e-09,3.303692344652884e-17,5.561157034872366e-09,0.0,0.0,,, +greedy,twomoon,linear,0.0,7.0138978514044944e-09,3.3036923446528836e-17,5.561157034872366e-09,0.0,0.0,,, greedy,twomoon,linear,0.0,3.0023576541582382e-09,4.696676533132629e-18,1.8090755560251639e-09,0.0,0.0,,, greedy,twomoon,linear,0.0,3.8254988787844724e-08,8.133255348584214e-16,2.5515117241248216e-08,0.0,0.0,,, -greedy,twomoon,linear,0.0,3.79971265562773e-08,8.169641861950045e-16,2.589324865631681e-08,0.0,0.0,,, +greedy,twomoon,linear,0.0,3.79971265562773e-08,8.16964186195005e-16,2.589324865631681e-08,0.0,0.0,,, greedy,twomoon,linear,0.0,1.2558747042312746e-08,1.1207942877644404e-16,1.0354810009438609e-08,0.0,0.0,,, mace,adult,linear,5.0,19.960944925915577,290.1268817427592,16.958904109589042,2.0,20.0,0.2999999999999999,1.0,4.443463830000018 mace,adult,linear,4.0,18.879312272854342,289.0352532546293,16.958904109589042,1.0,19.0,,, 
@@ -929,7 +929,7 @@ mace,adult,linear,5.0,19.418479172490915,278.5084767887341,16.616438356164384,2. mace,adult,linear,4.0,18.665054514956672,285.7636841447108,16.863013698630134,1.0,20.0,,, mace,adult,linear,5.0,19.24039698071009,272.62200653165024,16.438356164383563,2.0,19.0,,, mace,adult,linear,5.0,19.47327369303886,280.3324587740971,16.671232876712327,2.0,21.0,,, -mace,compass,linear,3.0,2.8947368421052637,2.8005540166204987,1.0,2.0,5.0,0.95,1.0,1.3733594249999896 +mace,compass,linear,3.0,2.894736842105264,2.8005540166204987,1.0,2.0,5.0,0.95,1.0,1.3733594249999896 mace,compass,linear,2.0,2.0,2.0,1.0,1.0,3.0,,, mace,compass,linear,3.0,2.9210526315789487,2.848337950138504,1.0,2.0,5.0,,, mace,compass,linear,3.0,2.3684210526315788,2.135734072022161,1.0,2.0,4.0,,, @@ -938,7 +938,7 @@ mace,compass,linear,3.0,2.9210526315789487,2.848337950138504,1.0,2.0,5.0,,, mace,compass,linear,2.0,1.947368421052632,1.8975069252077563,1.0,0.0,1.0,,, mace,compass,linear,2.0,2.0,2.0,1.0,0.0,1.0,,, mace,compass,linear,2.0,1.973684210526316,1.9480609418282548,1.0,1.0,3.0,,, -mace,compass,linear,3.0,2.8947368421052637,2.8005540166204987,1.0,2.0,4.0,,, +mace,compass,linear,3.0,2.894736842105264,2.8005540166204987,1.0,2.0,4.0,,, mace,compass,linear,3.0,2.763157894736842,2.582409972299169,1.0,2.0,4.0,,, mace,compass,linear,4.0,3.8684210526315774,3.754155124653741,1.0,2.0,3.0,,, mace,compass,linear,3.0,2.7894736842105265,2.623268698060942,1.0,2.0,4.0,,, @@ -1117,7 +1117,7 @@ gs,boston_housing,linear,12.0,1.3278428768319612,0.7139102504715427,0.8260869565 gs,boston_housing,linear,12.0,1.6101533774514616,0.7725277559128354,0.8260869565217391,0.0,8.0,,, gs,boston_housing,linear,12.0,1.0247120213136132,0.6882362611721536,0.8260869565217391,0.0,9.0,,, gs,boston_housing,linear,12.0,1.9140002323444945,1.833648502834473,1.0,0.0,11.0,,, -gs,boston_housing,linear,13.0,1.8593895134743117,1.4874654059174075,1.0,0.0,10.0,,, 
+gs,boston_housing,linear,13.0,1.859389513474312,1.4874654059174075,1.0,0.0,10.0,,, gs,boston_housing,linear,12.0,0.8731262749560516,0.7561459139684649,0.8695652173913043,0.0,11.0,,, gs,boston_housing,linear,12.0,2.596067817316649,1.1010039585205824,0.8260869565217391,0.0,8.0,,, gs,boston_housing,linear,7.0,0.826358110421394,0.682419670765155,0.8260869565217391,0.0,11.0,,, @@ -1145,7 +1145,7 @@ revise,breast_cancer,linear,30.0,3.63276310869432,0.5760430469017892,0.264299031 revise,breast_cancer,linear,30.0,3.9597324620583634,0.8484887647485951,0.4400111847849782,0.0,29.0,,, revise,breast_cancer,linear,30.0,1.6223767527951136,0.1596306564633282,0.2093215591109385,0.0,18.0,,, revise,breast_cancer,linear,30.0,1.395806164104469,0.0809393002444298,0.1027208090803092,0.0,25.0,,, -revise,breast_cancer,linear,30.0,2.6507316776476717,0.3057691727395539,0.2172700740508178,0.0,21.0,,, +revise,breast_cancer,linear,30.0,2.650731677647672,0.3057691727395539,0.2172700740508178,0.0,21.0,,, revise,breast_cancer,linear,30.0,1.1498330002533348,0.0813712486382773,0.1493908143445347,0.0,15.0,,, revise,breast_cancer,linear,30.0,2.361703121745361,0.2825737933886914,0.2385555570567848,0.0,17.0,,, revise,breast_cancer,linear,30.0,2.535804620848422,0.3662759740388305,0.3387314062527936,0.0,20.0,,, @@ -1255,44 +1255,44 @@ wachter,breast_cancer,linear,30.0,0.2999998807596818,0.0029999976151967,0.010000 wachter,breast_cancer,linear,30.0,0.2999998052109058,0.002999996104223,0.010000009733082,0.0,30.0,,, wachter,breast_cancer,linear,30.0,1.0876777649194045,0.0417430300797852,0.0398652481367978,0.0,30.0,,, wachter,breast_cancer,linear,30.0,0.5662130717683613,0.0109411623525196,0.0199690551779838,0.0,30.0,,, -claproar,adult,linear,0.0,4.4887057681908267e-08,9.310567341988123e-16,2.7815500902583782e-08,0.0,19.0,0.4,1.0,0.000289155 -claproar,adult,linear,0.0,2.5058649717746292e-08,2.317603064363525e-16,1.0947791895254966e-08,0.0,19.0,,, 
-claproar,adult,linear,0.0,4.7037179595754714e-08,8.382903224701333e-16,2.384185793236071e-08,0.0,19.0,,, -claproar,adult,linear,0.0,4.202930070640321e-08,9.042230455022549e-16,2.7815500902583782e-08,0.0,19.0,,, -claproar,adult,linear,0.0,4.468293218762298e-08,9.285983683485345e-16,2.7815500902583782e-08,0.0,19.0,,, -claproar,adult,linear,0.0,5.3588864512565955e-08,1.0883360773883468e-15,2.384185793236071e-08,0.0,20.0,,, -claproar,adult,linear,0.0,4.44788066933377e-08,9.26223336933091e-16,2.7815500902583782e-08,0.0,19.0,,, +claproar,adult,linear,0.0,4.4887057681908267e-08,9.310567341988123e-16,2.7815500902583785e-08,0.0,19.0,0.4,1.0,0.000289155 +claproar,adult,linear,0.0,2.505864971774629e-08,2.317603064363525e-16,1.0947791895254966e-08,0.0,19.0,,, +claproar,adult,linear,0.0,4.703717959575472e-08,8.382903224701333e-16,2.384185793236071e-08,0.0,19.0,,, +claproar,adult,linear,0.0,4.202930070640321e-08,9.042230455022549e-16,2.7815500902583785e-08,0.0,19.0,,, +claproar,adult,linear,0.0,4.468293218762298e-08,9.285983683485345e-16,2.7815500902583785e-08,0.0,19.0,,, +claproar,adult,linear,0.0,5.358886451256596e-08,1.0883360773883468e-15,2.384185793236071e-08,0.0,20.0,,, +claproar,adult,linear,0.0,4.44788066933377e-08,9.26223336933091e-16,2.7815500902583785e-08,0.0,19.0,,, claproar,adult,linear,0.0,1.902782881257892e-08,1.7473878485338283e-16,1.1920928910669204e-08,0.0,17.0,,, -claproar,adult,linear,0.0,5.3569195412883637e-08,1.040959669437839e-15,2.384185793236071e-08,0.0,19.0,,, +claproar,adult,linear,0.0,5.356919541288364e-08,1.040959669437839e-15,2.384185793236071e-08,0.0,19.0,,, claproar,adult,linear,0.0,3.0155784258578684e-08,5.904489425693929e-16,2.384185793236071e-08,0.0,21.0,,, claproar,adult,linear,0.0,2.8175983712852574e-08,2.9012973303829733e-16,1.1920928910669204e-08,0.0,17.0,,, claproar,adult,linear,0.0,5.4671088114965904e-08,1.0146395108614209e-15,2.384185793236071e-08,0.0,19.0,,, 
claproar,adult,linear,0.0,3.838865064720309e-08,4.591054514009589e-16,1.3380634544812864e-08,0.0,18.0,,, -claproar,adult,linear,0.0,3.6218528287612635e-08,6.90330030702125e-16,2.384185793236071e-08,0.0,19.0,,, -claproar,adult,linear,0.0,4.091341471168519e-08,7.257888334040447e-16,2.384185793236071e-08,0.0,19.0,,, +claproar,adult,linear,0.0,3.621852828761264e-08,6.90330030702125e-16,2.384185793236071e-08,0.0,19.0,,, +claproar,adult,linear,0.0,4.091341471168519e-08,7.25788833404045e-16,2.384185793236071e-08,0.0,19.0,,, claproar,adult,linear,0.0,3.8715524619536985e-08,6.455254533966823e-16,2.189558379050993e-08,0.0,17.0,,, -claproar,adult,linear,0.0,3.3122763809512186e-08,8.018691300248972e-16,2.7815500902583782e-08,0.0,17.0,,, -claproar,adult,linear,0.0,3.906948103926666e-08,8.93649989082638e-16,2.7815500902583782e-08,0.0,19.0,,, -claproar,adult,linear,0.0,3.720527369521776e-08,8.618699235589284e-16,2.7815500902583782e-08,0.0,19.0,,, +claproar,adult,linear,0.0,3.3122763809512186e-08,8.0186913002489725e-16,2.7815500902583785e-08,0.0,17.0,,, +claproar,adult,linear,0.0,3.906948103926666e-08,8.93649989082638e-16,2.7815500902583785e-08,0.0,19.0,,, +claproar,adult,linear,0.0,3.720527369521776e-08,8.61869923558929e-16,2.7815500902583785e-08,0.0,19.0,,, claproar,adult,linear,0.0,2.932825132750949e-08,5.851263616294832e-16,2.384185793236071e-08,0.0,19.0,,, claproar,boston_housing,linear,0.0,9.061724014105925e-08,1.3999423441805425e-15,2.5034179174099336e-08,0.0,0.0,0.02,1.0,0.0001366950000001 claproar,boston_housing,linear,0.0,4.366531408488399e-08,2.996694930604621e-16,1.1248201148283954e-08,0.0,0.0,,, claproar,boston_housing,linear,0.0,7.143715610637842e-08,1.1645570604649212e-15,2.9168230608611626e-08,0.0,0.0,,, -claproar,boston_housing,linear,0.0,8.584456340221537e-08,1.301974406797914e-15,2.57458158126056e-08,0.0,0.0,,, -claproar,boston_housing,linear,0.0,6.003495297255986e-08,5.817581967443228e-16,1.3950023136644065e-08,0.0,0.0,,, 
+claproar,boston_housing,linear,0.0,8.584456340221538e-08,1.301974406797914e-15,2.57458158126056e-08,0.0,0.0,,, +claproar,boston_housing,linear,0.0,6.003495297255986e-08,5.81758196744323e-16,1.3950023136644065e-08,0.0,0.0,,, claproar,boston_housing,linear,0.0,5.51889046342298e-08,5.263015888946465e-16,1.2712090491362462e-08,0.0,0.0,,, -claproar,boston_housing,linear,0.0,8.763516998709643e-08,1.5959271130303697e-15,2.7230746657558317e-08,0.0,0.0,,, -claproar,boston_housing,linear,0.0,7.374971872347452e-08,1.0212652946082925e-15,2.4605007920008862e-08,0.0,0.0,,, +claproar,boston_housing,linear,0.0,8.763516998709644e-08,1.5959271130303697e-15,2.7230746657558323e-08,0.0,0.0,,, +claproar,boston_housing,linear,0.0,7.374971872347452e-08,1.0212652946082925e-15,2.4605007920008866e-08,0.0,0.0,,, claproar,boston_housing,linear,0.0,6.883750204231266e-08,5.456698746073747e-16,1.1920928966180357e-08,0.0,0.0,,, -claproar,boston_housing,linear,0.0,1.1150353898797123e-07,2.2413818069540485e-15,2.9168230608611626e-08,0.0,0.0,,, +claproar,boston_housing,linear,0.0,1.1150353898797124e-07,2.2413818069540485e-15,2.9168230608611626e-08,0.0,0.0,,, claproar,boston_housing,linear,0.0,1.0484100711324537e-07,2.002289077516144e-15,2.6947928999376813e-08,0.0,0.0,,, -claproar,boston_housing,linear,0.0,5.970173126896608e-08,8.135558646996096e-16,2.282731081937328e-08,0.0,0.0,,, +claproar,boston_housing,linear,0.0,5.970173126896608e-08,8.1355586469961045e-16,2.282731081937328e-08,0.0,0.0,,, claproar,boston_housing,linear,0.0,8.678486050281432e-08,1.3755723814526745e-15,2.5694670835463285e-08,0.0,0.0,,, claproar,boston_housing,linear,0.0,8.352237716811178e-08,1.5585264851321046e-15,2.5965772199043613e-08,0.0,0.0,,, claproar,boston_housing,linear,0.0,9.309491971634753e-08,1.2676356736093877e-15,2.384185793236071e-08,0.0,0.0,,, -claproar,boston_housing,linear,0.0,8.416630588797347e-08,1.3575024970605357e-15,2.1982387687913277e-08,0.0,0.0,,, 
+claproar,boston_housing,linear,0.0,8.416630588797347e-08,1.3575024970605357e-15,2.198238768791328e-08,0.0,0.0,,, claproar,boston_housing,linear,0.0,8.87948349528227e-08,1.5934972949357408e-15,2.5391115654471722e-08,0.0,0.0,,, -claproar,boston_housing,linear,0.0,7.360492395734521e-08,1.1679592718862616e-15,2.6886544102211477e-08,0.0,0.0,,, +claproar,boston_housing,linear,0.0,7.360492395734521e-08,1.1679592718862616e-15,2.6886544102211484e-08,0.0,0.0,,, claproar,boston_housing,linear,0.0,7.300165475159631e-08,1.0470980747592357e-15,2.5787945556743352e-08,0.0,0.0,,, claproar,boston_housing,linear,0.0,7.055573051197375e-08,8.106427157505881e-16,1.7371899585505446e-08,0.0,0.0,,, claproar,breast_cancer,linear,0.0,1.2722827237576786e-07,1.3621976030255849e-15,2.5313179108366057e-08,0.0,0.0,0.0,1.0,0.0001647599999998 @@ -1301,27 +1301,27 @@ claproar,breast_cancer,linear,0.0,1.2776102209857698e-07,1.532069116948366e-15,2 claproar,breast_cancer,linear,0.0,1.143023532941545e-07,7.77961866245447e-16,1.3305683277398115e-08,0.0,0.0,,, claproar,breast_cancer,linear,0.0,1.0965009291513673e-07,7.226589272229269e-16,1.2031535490120149e-08,0.0,0.0,,, claproar,breast_cancer,linear,0.0,1.1131060727309629e-07,6.378162621539052e-16,1.1086480844468838e-08,0.0,0.0,,, -claproar,breast_cancer,linear,0.0,3.276065683316398e-07,5.005506720082984e-15,2.69428246379988e-08,0.0,0.0,,, -claproar,breast_cancer,linear,0.0,1.3500344207655057e-07,1.3901976831516796e-15,2.323979064744464e-08,0.0,0.0,,, -claproar,breast_cancer,linear,0.0,1.762103322283659e-07,1.6282664896193405e-15,1.3235455065263581e-08,0.0,0.0,,, -claproar,breast_cancer,linear,0.0,2.0018567371826726e-07,1.9573354157098936e-15,1.873135690999561e-08,0.0,0.0,,, +claproar,breast_cancer,linear,0.0,3.276065683316398e-07,5.0055067200829836e-15,2.69428246379988e-08,0.0,0.0,,, +claproar,breast_cancer,linear,0.0,1.3500344207655057e-07,1.3901976831516796e-15,2.3239790647444642e-08,0.0,0.0,,, 
+claproar,breast_cancer,linear,0.0,1.762103322283659e-07,1.6282664896193405e-15,1.323545506526358e-08,0.0,0.0,,, +claproar,breast_cancer,linear,0.0,2.0018567371826728e-07,1.9573354157098936e-15,1.873135690999561e-08,0.0,0.0,,, claproar,breast_cancer,linear,0.0,2.346125146171496e-07,3.5414838956685216e-15,2.6258815899460334e-08,0.0,0.0,,, claproar,breast_cancer,linear,0.0,1.4369297782201862e-07,1.2414470927363266e-15,1.4410150683819012e-08,0.0,0.0,,, claproar,breast_cancer,linear,0.0,1.250053055026901e-07,9.7016354828478e-16,1.2764116597008268e-08,0.0,0.0,,, claproar,breast_cancer,linear,0.0,1.7436790923641698e-07,1.825586382912487e-15,2.3227377021761697e-08,0.0,0.0,,, -claproar,breast_cancer,linear,0.0,1.3030769424604574e-07,8.959662590509908e-16,1.180577696713314e-08,0.0,0.0,,, +claproar,breast_cancer,linear,0.0,1.3030769424604574e-07,8.959662590509908e-16,1.1805776967133141e-08,0.0,0.0,,, claproar,breast_cancer,linear,0.0,2.8008889979558305e-07,4.416252830579859e-15,2.7243686639977227e-08,0.0,0.0,,, -claproar,breast_cancer,linear,0.0,1.4958639424703104e-07,1.4094146400845972e-15,2.5037650841497342e-08,0.0,0.0,,, +claproar,breast_cancer,linear,0.0,1.4958639424703104e-07,1.4094146400845972e-15,2.5037650841497345e-08,0.0,0.0,,, claproar,breast_cancer,linear,0.0,2.480164991922052e-07,3.9710986682389465e-15,2.6059803759359568e-08,0.0,0.0,,, -claproar,breast_cancer,linear,0.0,9.733526553160067e-08,4.754142392106929e-16,8.958045749629662e-09,0.0,0.0,,, -claproar,breast_cancer,linear,0.0,2.394264729671525e-07,3.267236243382033e-15,2.8646970018364474e-08,0.0,0.0,,, +claproar,breast_cancer,linear,0.0,9.733526553160068e-08,4.754142392106929e-16,8.958045749629662e-09,0.0,0.0,,, +claproar,breast_cancer,linear,0.0,2.394264729671525e-07,3.267236243382033e-15,2.8646970018364484e-08,0.0,0.0,,, claproar,compass,linear,0.0,1.9606791190618367e-10,3.8442626079251005e-20,1.9606791190618367e-10,0.0,1.0,0.9,1.0,0.000376535 
claproar,compass,linear,0.0,3.137086562743363e-09,9.841312102144969e-18,3.137086562743363e-09,0.0,0.0,,, claproar,compass,linear,0.0,1.9606791190618367e-10,3.8442626079251005e-20,1.9606791190618367e-10,0.0,1.0,,, claproar,compass,linear,0.0,0.0,0.0,0.0,0.0,0.0,,, claproar,compass,linear,0.0,0.0,0.0,0.0,0.0,1.0,,, claproar,compass,linear,0.0,3.921358238123673e-10,1.5377050431700402e-19,3.921358238123673e-10,0.0,1.0,,, -claproar,compass,linear,0.0,4.705629857992832e-09,2.2142952360433646e-17,4.705629857992832e-09,0.0,1.0,,, +claproar,compass,linear,0.0,4.705629857992832e-09,2.214295236043365e-17,4.705629857992832e-09,0.0,1.0,,, claproar,compass,linear,0.0,2.3528149151186284e-09,5.535738024804679e-18,2.3528149151186284e-09,0.0,1.0,,, claproar,compass,linear,0.0,1.2548346250973452e-08,1.5746099363431948e-16,1.2548346250973452e-08,0.0,4.0,,, claproar,compass,linear,0.0,1.9606791190618367e-10,3.8442626079251005e-20,1.9606791190618367e-10,0.0,4.0,,, @@ -1332,86 +1332,86 @@ claproar,compass,linear,0.0,6.274173125486726e-09,3.936524840857987e-17,6.274173 claproar,compass,linear,0.0,1.9606791190618367e-10,3.8442626079251005e-20,1.9606791190618367e-10,0.0,4.0,,, claproar,compass,linear,0.0,1.9606791190618367e-10,3.8442626079251005e-20,1.9606791190618367e-10,0.0,4.0,,, claproar,compass,linear,0.0,1.4116889546222922e-08,1.99286570460258e-16,1.4116889546222922e-08,0.0,1.0,,, -claproar,compass,linear,0.0,4.705629830237257e-09,2.2142952099218717e-17,4.705629830237257e-09,0.0,1.0,,, +claproar,compass,linear,0.0,4.705629830237257e-09,2.214295209921872e-17,4.705629830237257e-09,0.0,1.0,,, claproar,compass,linear,0.0,9.411259715985665e-09,8.857180944173456e-17,9.411259715985665e-09,0.0,4.0,,, claproar,compass,linear,0.0,9.411259715985665e-09,8.857180944173456e-17,9.411259715985665e-09,0.0,1.0,,, claproar,credit,linear,0.0,4.82676465853385e-10,1.2980474773473689e-19,3.4027269543712713e-10,0.0,12.0,0.04,1.0,0.00032931 
-claproar,credit,linear,0.0,1.1113991400368143e-09,7.447455477786602e-19,8.449894464690465e-10,0.0,9.0,,, -claproar,credit,linear,0.0,2.021795266165909e-08,1.9742965999263398e-16,9.93410748106882e-09,0.0,15.0,,, -claproar,credit,linear,0.0,1.5301913292888047e-08,1.2342800919937284e-16,9.93410748106882e-09,0.0,14.0,,, -claproar,credit,linear,0.0,1.30755773222746e-08,1.0167992288693263e-16,9.93410748106882e-09,0.0,5.0,,, +claproar,credit,linear,0.0,1.1113991400368143e-09,7.4474554777866025e-19,8.44989446469046e-10,0.0,9.0,,, +claproar,credit,linear,0.0,2.0217952661659093e-08,1.9742965999263398e-16,9.934107481068821e-09,0.0,15.0,,, +claproar,credit,linear,0.0,1.5301913292888047e-08,1.2342800919937284e-16,9.934107481068821e-09,0.0,14.0,,, +claproar,credit,linear,0.0,1.30755773222746e-08,1.0167992288693263e-16,9.934107481068821e-09,0.0,5.0,,, claproar,credit,linear,0.0,3.761769134906501e-09,8.378031056780479e-18,2.731830678337488e-09,0.0,16.0,,, -claproar,credit,linear,0.0,1.259257685843132e-08,1.0193932457851784e-16,9.93410748106882e-09,0.0,17.0,,, -claproar,credit,linear,0.0,2.1391959821592813e-08,1.6037317963066943e-16,9.93410748106882e-09,0.0,14.0,,, -claproar,credit,linear,0.0,1.68380109265287e-08,1.248609773609793e-16,9.93410748106882e-09,0.0,15.0,,, -claproar,credit,linear,0.0,4.259749340762564e-08,5.953074907643577e-16,1.986821496213764e-08,0.0,14.0,,, -claproar,credit,linear,0.0,1.5542197407542035e-08,1.1598119436146716e-16,9.93410748106882e-09,0.0,14.0,,, -claproar,credit,linear,0.0,1.1816963568691412e-08,9.992747074788836e-17,9.93410748106882e-09,0.0,14.0,,, -claproar,credit,linear,0.0,1.7953023116745737e-08,1.435876765061383e-16,9.93410748106882e-09,0.0,15.0,,, -claproar,credit,linear,0.0,2.162112459251977e-08,3.963272002079719e-16,1.986821496213764e-08,0.0,1.0,,, -claproar,credit,linear,0.0,1.0334882656268252e-08,9.874221703042575e-17,9.93410748106882e-09,0.0,3.0,,, 
-claproar,credit,linear,0.0,2.8958002536681733e-09,5.096037251706132e-18,2.206080235156449e-09,0.0,0.0,,, -claproar,credit,linear,0.0,1.0160594221130144e-08,9.872965301854498e-17,9.93410748106882e-09,0.0,14.0,,, -claproar,credit,linear,0.0,2.4814669445996643e-08,2.1133414023820086e-16,9.93410748106882e-09,0.0,16.0,,, +claproar,credit,linear,0.0,1.259257685843132e-08,1.0193932457851784e-16,9.934107481068821e-09,0.0,17.0,,, +claproar,credit,linear,0.0,2.1391959821592813e-08,1.6037317963066943e-16,9.934107481068821e-09,0.0,14.0,,, +claproar,credit,linear,0.0,1.68380109265287e-08,1.248609773609793e-16,9.934107481068821e-09,0.0,15.0,,, +claproar,credit,linear,0.0,4.259749340762564e-08,5.95307490764358e-16,1.9868214962137642e-08,0.0,14.0,,, +claproar,credit,linear,0.0,1.5542197407542035e-08,1.1598119436146716e-16,9.934107481068821e-09,0.0,14.0,,, +claproar,credit,linear,0.0,1.1816963568691412e-08,9.992747074788836e-17,9.934107481068821e-09,0.0,14.0,,, +claproar,credit,linear,0.0,1.7953023116745737e-08,1.435876765061383e-16,9.934107481068821e-09,0.0,15.0,,, +claproar,credit,linear,0.0,2.162112459251977e-08,3.963272002079719e-16,1.9868214962137642e-08,0.0,1.0,,, +claproar,credit,linear,0.0,1.0334882656268252e-08,9.874221703042575e-17,9.934107481068821e-09,0.0,3.0,,, +claproar,credit,linear,0.0,2.895800253668173e-09,5.096037251706133e-18,2.206080235156449e-09,0.0,0.0,,, +claproar,credit,linear,0.0,1.0160594221130144e-08,9.872965301854498e-17,9.934107481068821e-09,0.0,14.0,,, +claproar,credit,linear,0.0,2.4814669445996643e-08,2.1133414023820086e-16,9.934107481068821e-09,0.0,16.0,,, claproar,credit,linear,0.0,2.0101766453652079e-10,1.611213453614543e-20,1.1535837302761863e-10,0.0,0.0,,, -claproar,credit,linear,0.0,2.9443530238926208e-08,2.423296607737863e-16,9.93410748106882e-09,0.0,17.0,,, +claproar,credit,linear,0.0,2.9443530238926208e-08,2.423296607737863e-16,9.934107481068821e-09,0.0,17.0,,, 
claproar,german,linear,0.0,7.538642377014071e-09,2.0977189986947928e-17,3.6568272715475918e-09,0.0,0.0,0.0899999999999999,1.0,0.00043231 claproar,german,linear,0.0,1.5569372430190853e-08,9.906311354915359e-17,7.430082687998052e-09,0.0,0.0,,, claproar,german,linear,0.0,1.4227989147963528e-08,8.092924525249801e-17,6.3862119481505886e-09,0.0,0.0,,, claproar,german,linear,0.0,3.3625227491285166e-08,5.316490663571029e-16,2.1036933417484253e-08,0.0,0.0,,, -claproar,german,linear,0.0,1.4218012350797338e-08,7.207826597945518e-17,6.4543821398643794e-09,0.0,0.0,,, -claproar,german,linear,0.0,3.1002004852176404e-09,4.88802129704364e-18,1.7530777940422124e-09,0.0,0.0,,, +claproar,german,linear,0.0,1.4218012350797338e-08,7.207826597945518e-17,6.454382139864379e-09,0.0,0.0,,, +claproar,german,linear,0.0,3.10020048521764e-09,4.88802129704364e-18,1.753077794042212e-09,0.0,0.0,,, claproar,german,linear,0.0,3.9374481508591685e-08,6.115403763463197e-16,2.1036933417484253e-08,0.0,0.0,,, claproar,german,linear,0.0,1.5867952896098103e-08,1.2548404328882016e-16,1.0518466708742125e-08,0.0,0.0,,, claproar,german,linear,0.0,4.273285919564884e-08,6.887950706179222e-16,2.1036933417484253e-08,0.0,0.0,,, claproar,german,linear,0.0,3.6472797296238646e-08,6.502253230081811e-16,2.1036933417484253e-08,0.0,0.0,,, claproar,german,linear,0.0,3.924216507300571e-09,7.78712506417307e-18,2.171138713258358e-09,0.0,0.0,,, -claproar,german,linear,0.0,7.63849651586046e-09,2.3849574051501758e-17,4.2574746506041095e-09,0.0,0.0,,, -claproar,german,linear,0.0,1.0550426268740589e-08,5.5491152881487364e-17,7.01231117616885e-09,0.0,0.0,,, +claproar,german,linear,0.0,7.63849651586046e-09,2.384957405150176e-17,4.2574746506041095e-09,0.0,0.0,,, +claproar,german,linear,0.0,1.0550426268740589e-08,5.5491152881487364e-17,7.0123111761688506e-09,0.0,0.0,,, claproar,german,linear,0.0,1.3106205704049856e-08,5.896233001147296e-17,5.342575465361321e-09,0.0,0.0,,, 
claproar,german,linear,0.0,9.159031505445725e-09,3.111229248723116e-17,4.257474622848534e-09,0.0,0.0,,, claproar,german,linear,0.0,1.257860320763582e-08,1.5068508953204156e-16,1.2271544502784336e-08,0.0,0.0,,, claproar,german,linear,0.0,9.113936133564593e-09,3.0830362473144466e-17,4.2574746506041095e-09,0.0,0.0,,, claproar,german,linear,0.0,1.1267739435938397e-08,5.323127357499764e-17,6.321555667820888e-09,0.0,0.0,,, -claproar,german,linear,0.0,1.16082941592488e-08,6.133649769549174e-17,7.01231117616885e-09,0.0,0.0,,, +claproar,german,linear,0.0,1.16082941592488e-08,6.133649769549174e-17,7.0123111761688506e-09,0.0,0.0,,, claproar,german,linear,0.0,2.9016718472441028e-08,2.828088317850704e-16,1.0518466764253276e-08,0.0,0.0,,, -claproar,mortgage,linear,0.0,1.3887633598663742e-08,1.9052277768977198e-16,1.3802737786061671e-08,0.0,0.0,0.0,1.0,0.000483415 +claproar,mortgage,linear,0.0,1.3887633598663742e-08,1.9052277768977198e-16,1.3802737786061673e-08,0.0,0.0,0.0,1.0,0.000483415 claproar,mortgage,linear,0.0,8.284418373527558e-09,6.060678374710688e-17,7.767880838560615e-09,0.0,0.0,,, -claproar,mortgage,linear,0.0,1.2185677800147232e-08,8.403084346710685e-17,8.304791854207849e-09,0.0,0.0,,, -claproar,mortgage,linear,0.0,1.935810234332536e-08,1.9035108122162226e-16,1.0900324698415176e-08,0.0,0.0,,, +claproar,mortgage,linear,0.0,1.2185677800147232e-08,8.403084346710684e-17,8.304791854207849e-09,0.0,0.0,,, +claproar,mortgage,linear,0.0,1.9358102343325363e-08,1.9035108122162224e-16,1.0900324698415176e-08,0.0,0.0,,, claproar,mortgage,linear,0.0,1.85390625628834e-08,2.580389365682648e-16,1.5834229150080148e-08,0.0,0.0,,, -claproar,mortgage,linear,0.0,2.2037257796370117e-08,3.015749829579498e-16,1.6438712724387017e-08,0.0,0.0,,, +claproar,mortgage,linear,0.0,2.2037257796370123e-08,3.015749829579498e-16,1.6438712724387017e-08,0.0,0.0,,, claproar,mortgage,linear,0.0,2.231032841804037e-08,2.5888876133196194e-16,1.3392728093197805e-08,0.0,0.0,,, 
-claproar,mortgage,linear,0.0,1.802495430691664e-08,1.952824589065916e-16,1.3064203330248604e-08,0.0,0.0,,, +claproar,mortgage,linear,0.0,1.802495430691664e-08,1.9528245890659162e-16,1.3064203330248604e-08,0.0,0.0,,, claproar,mortgage,linear,0.0,1.6488733628472474e-08,1.6895817973515587e-16,1.2307555918855684e-08,0.0,0.0,,, -claproar,mortgage,linear,0.0,1.0237059644424562e-08,5.240462888285314e-17,5.172999084646079e-09,0.0,0.0,,, +claproar,mortgage,linear,0.0,1.0237059644424562e-08,5.2404628882853135e-17,5.172999084646079e-09,0.0,0.0,,, claproar,mortgage,linear,0.0,1.2585979369816869e-08,8.138647215995989e-17,7.337746549263002e-09,0.0,0.0,,, claproar,mortgage,linear,0.0,1.1000842292663292e-08,1.0811745682774963e-16,1.0379365256163451e-08,0.0,0.0,,, -claproar,mortgage,linear,0.0,7.565413406851462e-09,3.768960840903487e-17,5.9124799778320636e-09,0.0,0.0,,, -claproar,mortgage,linear,0.0,5.880313874717657e-09,1.746619573845589e-17,3.23777255006874e-09,0.0,0.0,,, -claproar,mortgage,linear,0.0,9.831747527222491e-09,4.833580424408305e-17,4.9615603292529906e-09,0.0,0.0,,, +claproar,mortgage,linear,0.0,7.565413406851462e-09,3.7689608409034865e-17,5.9124799778320636e-09,0.0,0.0,,, +claproar,mortgage,linear,0.0,5.880313874717657e-09,1.746619573845589e-17,3.2377725500687404e-09,0.0,0.0,,, +claproar,mortgage,linear,0.0,9.831747527222493e-09,4.8335804244083045e-17,4.9615603292529906e-09,0.0,0.0,,, claproar,mortgage,linear,0.0,1.2038921637369526e-08,7.615116443611207e-17,7.376543598169861e-09,0.0,0.0,,, -claproar,mortgage,linear,0.0,9.491931074467884e-09,5.3056655273239444e-17,6.746999958728851e-09,0.0,0.0,,, +claproar,mortgage,linear,0.0,9.491931074467884e-09,5.3056655273239444e-17,6.7469999587288515e-09,0.0,0.0,,, claproar,mortgage,linear,0.0,1.3952049127130552e-08,9.909163417973423e-17,7.914586430679549e-09,0.0,0.0,,, claproar,mortgage,linear,0.0,1.6149514947549903e-08,1.831977915925236e-16,1.3212576810417433e-08,0.0,0.0,,, 
claproar,mortgage,linear,0.0,6.244202488403516e-09,3.5422295968969385e-17,5.944091996390455e-09,0.0,0.0,,, -claproar,twomoon,linear,0.0,3.3813752992051256e-08,8.816470920645754e-16,2.935601595144277e-08,0.0,0.0,0.0,1.0,0.0002995849999999 +claproar,twomoon,linear,0.0,3.3813752992051256e-08,8.81647092064576e-16,2.935601595144277e-08,0.0,0.0,0.0,1.0,0.0002995849999999 claproar,twomoon,linear,0.0,3.4346029686993524e-08,7.109226862390693e-16,2.4954332489279807e-08,0.0,0.0,,, claproar,twomoon,linear,0.0,4.482949123030266e-09,1.008076735915938e-17,2.368657381346395e-09,0.0,0.0,,, claproar,twomoon,linear,0.0,2.330994314236889e-08,3.089242255594349e-16,1.5970495925721195e-08,0.0,0.0,,, claproar,twomoon,linear,0.0,1.7312552447190654e-08,1.7049036072742816e-16,1.1867827609535198e-08,0.0,0.0,,, claproar,twomoon,linear,0.0,3.9368383886184695e-08,8.721299864375876e-16,2.6655388452034856e-08,0.0,0.0,,, -claproar,twomoon,linear,0.0,2.0062428718325972e-08,2.19180580099984e-16,1.3025380107833941e-08,0.0,0.0,,, +claproar,twomoon,linear,0.0,2.006242871832597e-08,2.19180580099984e-16,1.3025380107833941e-08,0.0,0.0,,, claproar,twomoon,linear,0.0,1.3714618218862997e-08,9.830191742393739e-17,8.316168420563486e-09,0.0,0.0,,, claproar,twomoon,linear,0.0,4.232974959439417e-08,9.81955014937161e-16,2.7724263662598503e-08,0.0,0.0,,, claproar,twomoon,linear,0.0,2.6678959041959164e-08,4.836581604536637e-16,2.1332436817012024e-08,0.0,0.0,,, claproar,twomoon,linear,0.0,1.4165018380474237e-08,1.6142307453820044e-16,1.2609678523567425e-08,0.0,0.0,,, claproar,twomoon,linear,0.0,1.435943769445558e-08,1.8518351927822914e-16,1.3586230918072316e-08,0.0,0.0,,, claproar,twomoon,linear,0.0,1.4451852714536528e-08,1.0445544937769891e-16,7.343028740613988e-09,0.0,0.0,,, -claproar,twomoon,linear,0.0,3.030812995019616e-08,7.814043373292921e-16,2.7844868077941957e-08,0.0,0.0,,, -claproar,twomoon,linear,0.0,7.135585122775012e-09,2.5460330042224788e-17,3.5997497205286773e-09,0.0,0.0,,, 
+claproar,twomoon,linear,0.0,3.030812995019616e-08,7.814043373292921e-16,2.7844868077941963e-08,0.0,0.0,,, +claproar,twomoon,linear,0.0,7.135585122775012e-09,2.5460330042224788e-17,3.599749720528677e-09,0.0,0.0,,, claproar,twomoon,linear,0.0,2.4025606226718565e-08,3.495247200454482e-16,1.7531400886561244e-08,0.0,0.0,,, -claproar,twomoon,linear,0.0,2.2577137270829443e-08,3.667492287085482e-16,1.876806277056886e-08,0.0,0.0,,, +claproar,twomoon,linear,0.0,2.2577137270829453e-08,3.667492287085482e-16,1.876806277056886e-08,0.0,0.0,,, claproar,twomoon,linear,0.0,1.6213392628472434e-08,1.431988503656087e-16,1.0531753025233572e-08,0.0,0.0,,, claproar,twomoon,linear,0.0,3.312241828035134e-08,7.202236947752843e-16,2.5826099814274528e-08,0.0,0.0,,, claproar,twomoon,linear,0.0,3.2146713291325575e-08,6.212848124877535e-16,2.330451231991049e-08,0.0,0.0,,, @@ -1526,54 +1526,54 @@ cfvae,boston_housing,linear,13.0,2.354798590209251,1.2301028537404477,1.0,0.0,11 probe,adult,linear,51.0,2.5574682458217044,0.20724095760647737,0.10291039943695068,2.0,51.0,0.0,1.0,11.27795516 probe,adult,linear,48.0,1.6209024338863478,0.0782517097074313,0.06244194507598877,2.0,51.0,,, probe,adult,linear,51.0,6.151970284269187,1.3522899002686382,0.268756240606308,2.0,48.0,,, -probe,adult,linear,47.0,4.1338594518437715,0.6257325925713797,0.1863815188407898,2.0,44.0,,, -probe,adult,linear,49.0,6.529014715911816,1.5907548243066623,0.2905552387237549,2.0,27.0,,, -probe,compass,linear,7.0,1.0493243297463968,0.15758896097912403,0.15593880414962769,5.0,3.0,0.0,1.0,3.9818648800000007 -probe,compass,linear,7.0,1.1968591064214706,0.20529106263232566,0.178464874625206,5.0,3.0,,, -probe,compass,linear,7.0,0.5305286708631014,0.040413185530297796,0.0809311717748642,5.0,6.0,,, +probe,adult,linear,47.0,4.133859451843772,0.6257325925713797,0.1863815188407898,2.0,44.0,,, +probe,adult,linear,49.0,6.529014715911816,1.5907548243066625,0.2905552387237549,2.0,27.0,,, 
+probe,compass,linear,7.0,1.0493243297463968,0.157588960979124,0.1559388041496276,5.0,3.0,0.0,1.0,3.9818648800000007 +probe,compass,linear,7.0,1.1968591064214706,0.2052910626323256,0.178464874625206,5.0,3.0,,, +probe,compass,linear,7.0,0.5305286708631014,0.0404131855302977,0.0809311717748642,5.0,6.0,,, probe,compass,linear,7.0,2.068292945623398,0.6289491666450724,0.3217114806175232,5.0,2.0,,, -probe,compass,linear,6.0,0.11717507370600574,0.002380937481501453,0.024646831676363945,5.0,7.0,,, -probe,credit,linear,20.0,0.7460355385040021,0.03599241548631901,0.05706929787993431,5.0,18.0,0.2666666666666667,1.0,4.403598066666665 -probe,credit,linear,18.0,0.2715397661723585,0.005983706032008009,0.028109369799494743,5.0,20.0,,, -probe,credit,linear,18.0,0.2347176402264215,0.004717934816094677,0.025857295840978622,5.0,20.0,,, -probe,german,linear,4.0,0.34145124321402176,0.02916159435279554,0.08767480622319615,2.0,2.0,0.19999999999999996,1.0,3.3755328400000026 -probe,german,linear,4.0,0.3343678011115066,0.027964667531387127,0.08589213144253283,2.0,2.0,,, -probe,german,linear,4.0,0.267133860467632,0.017855097803688207,0.06914474771303289,2.0,2.0,,, -probe,german,linear,4.0,0.3044240432907651,0.023182781515892512,0.07841214362312765,2.0,2.0,,, -probe,german,linear,4.0,0.2473267066867472,0.015308234751386422,0.06424275391242085,2.0,3.0,,, -probe,mortgage,linear,2.0,2.596893806724318,3.3737499230649304,1.3286230641796841,0.0,0.0,0.0,1.0,7.078433219999999 -probe,mortgage,linear,2.0,2.7662955305674433,3.8262065210593192,1.3854972163486525,0.0,0.0,,, +probe,compass,linear,6.0,0.1171750737060057,0.0023809374815014,0.0246468316763639,5.0,7.0,,, +probe,credit,linear,20.0,0.7460355385040021,0.035992415486319,0.0570692978799343,5.0,18.0,0.2666666666666667,1.0,4.403598066666665 +probe,credit,linear,18.0,0.2715397661723585,0.005983706032008,0.0281093697994947,5.0,20.0,,, +probe,credit,linear,18.0,0.2347176402264215,0.0047179348160946,0.0258572958409786,5.0,20.0,,, 
+probe,german,linear,4.0,0.3414512432140217,0.0291615943527955,0.0876748062231961,2.0,2.0,0.1999999999999999,1.0,3.375532840000002 +probe,german,linear,4.0,0.3343678011115066,0.0279646675313871,0.0858921314425328,2.0,2.0,,, +probe,german,linear,4.0,0.267133860467632,0.0178550978036882,0.0691447477130328,2.0,2.0,,, +probe,german,linear,4.0,0.3044240432907651,0.0231827815158925,0.0784121436231276,2.0,2.0,,, +probe,german,linear,4.0,0.2473267066867472,0.0153082347513864,0.0642427539124208,2.0,3.0,,, +probe,mortgage,linear,2.0,2.596893806724318,3.3737499230649304,1.328623064179684,0.0,0.0,0.0,1.0,7.078433219999999 +probe,mortgage,linear,2.0,2.766295530567444,3.8262065210593192,1.3854972163486523,0.0,0.0,,, probe,mortgage,linear,2.0,2.9121862424407814,4.241009224725797,1.473339416403974,0.0,0.0,,, probe,mortgage,linear,2.0,2.904149937359671,4.217532586486467,1.4677139768545209,0.0,0.0,,, probe,mortgage,linear,2.0,3.148416728152398,4.960202167161096,1.6185830196025777,0.0,0.0,,, -probe,boston_housing,linear,12.0,1.3520136007298742,0.16837960058355547,0.13424224549151464,0.0,9.0,0.0,1.0,3.862365679999999 -probe,boston_housing,linear,11.0,1.007249276669714,0.09299927052223386,0.09963742976493017,0.0,9.0,,, -probe,boston_housing,linear,13.0,1.3172484222248864,0.15939193355054326,0.12997258850868726,0.0,9.0,,, -probe,boston_housing,linear,12.0,1.2995529090837012,0.15490400031856688,0.12785010134920172,0.0,9.0,,, -probe,boston_housing,linear,12.0,1.0059221718260045,0.09273649552678494,0.09945072011744727,0.0,9.0,,, -roar,adult,linear,5.0,9.618374680646278,18.55040773192179,2.03277587890625,1.0,5.0,0.05999999999999994,1.0,1.0735000799999999 -roar,adult,linear,5.0,8.90917690170632,15.907446315902012,1.8774079084396362,1.0,5.0,,, -roar,adult,linear,5.0,14.060748848458747,39.60930977247996,2.9490909576416016,1.0,5.0,,, -roar,adult,linear,5.0,11.879894103582657,28.27958438286244,2.4991862773895264,1.0,5.0,,, 
+probe,boston_housing,linear,12.0,1.3520136007298742,0.1683796005835554,0.1342422454915146,0.0,9.0,0.0,1.0,3.862365679999999 +probe,boston_housing,linear,11.0,1.007249276669714,0.0929992705222338,0.0996374297649301,0.0,9.0,,, +probe,boston_housing,linear,13.0,1.3172484222248864,0.1593919335505432,0.1299725885086872,0.0,9.0,,, +probe,boston_housing,linear,12.0,1.2995529090837012,0.1549040003185668,0.1278501013492017,0.0,9.0,,, +probe,boston_housing,linear,12.0,1.0059221718260043,0.0927364955267849,0.0994507201174472,0.0,9.0,,, +roar,adult,linear,5.0,9.618374680646278,18.55040773192179,2.03277587890625,1.0,5.0,0.0599999999999999,1.0,1.07350008 +roar,adult,linear,5.0,8.90917690170632,15.907446315902012,1.877407908439636,1.0,5.0,,, +roar,adult,linear,5.0,14.060748848458749,39.60930977247996,2.9490909576416016,1.0,5.0,,, +roar,adult,linear,5.0,11.879894103582656,28.27958438286244,2.4991862773895264,1.0,5.0,,, roar,adult,linear,5.0,15.001941947183386,45.09081117914663,3.1452958583831787,1.0,5.0,,, roar,adult,linear,5.0,7.214394521543291,10.432608884307578,1.521523356437683,1.0,5.0,,, -roar,adult,linear,5.0,8.701402318683146,15.156521293738738,1.8138915300369263,1.0,5.0,,, -roar,adult,linear,5.0,9.1609844435552,16.823023459339645,1.9357142242501106,1.0,5.0,,, -roar,adult,linear,5.0,7.771029084896999,12.107500995719537,1.6392885446548462,1.0,5.0,,, +roar,adult,linear,5.0,8.701402318683146,15.156521293738738,1.8138915300369265,1.0,5.0,,, +roar,adult,linear,5.0,9.1609844435552,16.823023459339645,1.9357142242501104,1.0,5.0,,, +roar,adult,linear,5.0,7.771029084896999,12.107500995719535,1.6392885446548462,1.0,5.0,,, roar,adult,linear,5.0,10.990124843014152,24.19951043751935,2.305537700653076,1.0,5.0,,, -roar,compass,linear,1.0,1.9990301571394267,3.996121569152881,1.9990301571394267,0.0,0.0,0.09999999999999998,1.0,0.36033723 -roar,compass,linear,1.0,1.949421625388296,3.8002446735315463,1.949421625388296,0.0,0.0,,, 
-roar,compass,linear,1.0,1.962879739309612,3.8528968709921703,1.962879739309612,0.0,0.0,,, -roar,compass,linear,1.0,1.9990301571394267,3.996121569152881,1.9990301571394267,0.0,0.0,,, -roar,compass,linear,1.0,1.962879739309612,3.8528968709921703,1.962879739309612,0.0,0.0,,, +roar,compass,linear,1.0,1.9990301571394269,3.996121569152881,1.9990301571394269,0.0,0.0,0.0999999999999999,1.0,0.36033723 +roar,compass,linear,1.0,1.949421625388296,3.8002446735315454,1.949421625388296,0.0,0.0,,, +roar,compass,linear,1.0,1.962879739309612,3.85289687099217,1.962879739309612,0.0,0.0,,, +roar,compass,linear,1.0,1.9990301571394269,3.996121569152881,1.9990301571394269,0.0,0.0,,, +roar,compass,linear,1.0,1.962879739309612,3.85289687099217,1.962879739309612,0.0,0.0,,, roar,compass,linear,1.0,1.9290034770965576,3.7210544146506095,1.9290034770965576,0.0,0.0,,, roar,compass,linear,1.0,2.0357320371427035,4.144204927049182,2.0357320371427035,0.0,0.0,,, roar,compass,linear,1.0,2.1320743874499675,4.545741193620154,2.1320743874499675,0.0,0.0,,, -roar,compass,linear,1.0,1.8982458051882292,3.6033371369147082,1.8982458051882292,0.0,0.0,,, +roar,compass,linear,1.0,1.8982458051882287,3.603337136914708,1.8982458051882287,0.0,0.0,,, roar,compass,linear,1.0,2.020757461849012,4.083460719618461,2.020757461849012,0.0,0.0,,, -roar,credit,linear,9.0,7.554007342605132,9.557016585065332,1.3854458826834435,0.0,9.0,0.56,1.0,0.34471227999999987 -roar,credit,linear,9.0,8.263296380535063,11.415951032296215,1.4944425821304321,0.0,9.0,,, -roar,credit,linear,9.0,7.9519669217018265,10.761187640390776,1.4525296688079834,0.0,9.0,,, +roar,credit,linear,9.0,7.554007342605132,9.557016585065332,1.3854458826834437,0.0,9.0,0.56,1.0,0.3447122799999998 +roar,credit,linear,9.0,8.263296380535063,11.415951032296215,1.494442582130432,0.0,9.0,,, +roar,credit,linear,9.0,7.951966921701826,10.761187640390776,1.4525296688079834,0.0,9.0,,, roar,credit,linear,9.0,8.895464097377914,12.381827340968108,1.553907871246338,0.0,9.0,,, 
roar,credit,linear,9.0,8.086953388493937,10.873458234077606,1.4708586931228638,0.0,9.0,,, roar,credit,linear,9.0,7.626236090316823,9.78442853097598,1.3725225623943462,0.0,9.0,,, @@ -1581,53 +1581,85 @@ roar,credit,linear,9.0,7.347324759390659,9.099771242797132,1.3593943213365112,0. roar,credit,linear,9.0,8.573404865122036,11.75128423288558,1.5116277933120728,0.0,9.0,,, roar,credit,linear,9.0,8.33025701592436,11.508674587603588,1.4947115182876587,0.0,9.0,,, roar,credit,linear,9.0,7.710231722738529,10.061499167008163,1.3989948133241032,0.0,9.0,,, -roar,german,linear,3.0,4.393869430113556,6.436350331880366,1.4821730782003963,1.0,3.0,0.64,1.0,0.26295274999999985 +roar,german,linear,3.0,4.393869430113556,6.436350331880366,1.4821730782003963,1.0,3.0,0.64,1.0,0.2629527499999998 roar,german,linear,3.0,4.420399537046276,6.51395000548823,1.488461641704335,1.0,3.0,,, roar,german,linear,3.0,4.510777499421352,6.783845064152101,1.5241791430641622,1.0,3.0,,, roar,german,linear,3.0,4.90096334740276,8.00720323400812,1.6497113634558285,1.0,3.0,,, roar,german,linear,3.0,4.731461251187688,7.46303305625362,1.5936195429633646,1.0,3.0,,, roar,german,linear,3.0,4.459388874376304,6.629541394896777,1.502940486459171,1.0,3.0,,, roar,german,linear,3.0,4.74306260843189,7.500255258592999,1.6012532290290384,1.0,3.0,,, -roar,german,linear,3.0,4.373869582470783,6.377722157020068,1.4742735554190243,1.0,3.0,,, +roar,german,linear,3.0,4.373869582470783,6.377722157020068,1.4742735554190245,1.0,3.0,,, roar,german,linear,3.0,4.663053273002536,7.249088722063662,1.572682780378005,1.0,3.0,,, roar,german,linear,3.0,4.542673939111683,6.879473442090738,1.530921739690444,1.0,3.0,,, -roar,mortgage,linear,2.0,2.522274310280813,3.1810537599922295,1.2688802759890785,0.0,2.0,1.0,1.0,0.18166484000000055 +roar,mortgage,linear,2.0,2.522274310280813,3.181053759992229,1.2688802759890785,0.0,2.0,1.0,1.0,0.1816648400000005 roar,mortgage,linear,2.0,2.5232224875324696,3.1834452314856985,1.269336872896814,0.0,2.0,,, 
-roar,mortgage,linear,2.0,2.7183817091441353,3.6949330905700264,1.3673619091347047,0.0,2.0,,, +roar,mortgage,linear,2.0,2.718381709144136,3.6949330905700255,1.367361909134705,0.0,2.0,,, roar,mortgage,linear,2.0,2.608298671002297,3.401736856378892,1.3120827396481105,0.0,2.0,,, -roar,mortgage,linear,2.0,2.620706989412102,3.4341788395491415,1.318299480246587,0.0,2.0,,, +roar,mortgage,linear,2.0,2.620706989412102,3.434178839549141,1.318299480246587,0.0,2.0,,, roar,mortgage,linear,2.0,2.403475681129275,2.888457717914158,1.2091554892568348,0.0,2.0,,, roar,mortgage,linear,2.0,2.450540268716643,3.002683193812852,1.2326657246638628,0.0,2.0,,, -roar,mortgage,linear,2.0,2.4390385482041332,2.974561971387154,1.2268490617623902,0.0,2.0,,, -roar,mortgage,linear,2.0,2.4878201699859215,3.094736946601415,1.251405000170108,0.0,2.0,,, +roar,mortgage,linear,2.0,2.439038548204133,2.974561971387154,1.2268490617623902,0.0,2.0,,, +roar,mortgage,linear,2.0,2.487820169985921,3.094736946601415,1.251405000170108,0.0,2.0,,, roar,mortgage,linear,2.0,2.6317276458647223,3.463123228227851,1.323864673621967,0.0,2.0,,, -roar,twomoon,linear,2.0,2.4764774068615765,3.0665925466661035,1.2460608936031168,0.0,2.0,1.0,1.0,0.18425088999999986 +roar,twomoon,linear,2.0,2.4764774068615765,3.0665925466661035,1.2460608936031168,0.0,2.0,1.0,1.0,0.1842508899999998 roar,twomoon,linear,2.0,2.858603942178217,4.086036929781432,1.43999497494707,0.0,2.0,,, -roar,twomoon,linear,2.0,2.5008486370743714,3.1272062295555054,1.2569157273648117,0.0,2.0,,, +roar,twomoon,linear,2.0,2.5008486370743714,3.127206229555505,1.2569157273648115,0.0,2.0,,, roar,twomoon,linear,2.0,2.645121390713767,3.4984377313386865,1.3297768415901543,0.0,2.0,,, roar,twomoon,linear,2.0,2.7621854638776338,3.815002904890798,1.3902752278508608,0.0,2.0,,, -roar,twomoon,linear,2.0,2.4009663696214956,2.8824744525108477,1.2092780265982686,0.0,2.0,,, -roar,twomoon,linear,2.0,2.9331257801327335,4.3018914311168075,1.4783529298707638,0.0,2.0,,, 
-roar,twomoon,linear,2.0,2.5625340565643286,3.2833589723685295,1.2871226598668906,0.0,2.0,,, -roar,twomoon,linear,2.0,2.9081314364424156,4.228884909411611,1.4656993676765149,0.0,2.0,,, +roar,twomoon,linear,2.0,2.400966369621496,2.882474452510848,1.2092780265982686,0.0,2.0,,, +roar,twomoon,linear,2.0,2.933125780132733,4.301891431116808,1.4783529298707638,0.0,2.0,,, +roar,twomoon,linear,2.0,2.5625340565643286,3.283358972368529,1.2871226598668906,0.0,2.0,,, +roar,twomoon,linear,2.0,2.908131436442416,4.228884909411611,1.4656993676765149,0.0,2.0,,, roar,twomoon,linear,2.0,2.4759695839771863,3.0652820601773967,1.2438741802130255,0.0,2.0,,, -roar,breast_cancer,linear,30.0,14.373579104507138,9.884220992183279,0.8551746366616385,0.0,30.0,0.96,1.0,0.15464467000000043 +roar,breast_cancer,linear,30.0,14.373579104507138,9.88422099218328,0.8551746366616385,0.0,30.0,0.96,1.0,0.1546446700000004 roar,breast_cancer,linear,30.0,14.399088388008732,9.922358630721428,0.8608574424742395,0.0,30.0,,, roar,breast_cancer,linear,30.0,16.032786668978176,11.929192668337192,0.9031640211284282,0.0,30.0,,, -roar,breast_cancer,linear,30.0,14.850128897456901,10.449302527758986,0.8781975568034549,0.0,30.0,,, +roar,breast_cancer,linear,30.0,14.8501288974569,10.449302527758986,0.8781975568034549,0.0,30.0,,, roar,breast_cancer,linear,30.0,14.726044261421514,10.28244268308281,0.8661585435652516,0.0,30.0,,, roar,breast_cancer,linear,30.0,15.537713827643511,11.241001199547258,0.8975186660490515,0.0,30.0,,, -roar,breast_cancer,linear,30.0,14.481708189901077,9.937685067357704,0.8491318842156277,0.0,30.0,,, +roar,breast_cancer,linear,30.0,14.481708189901076,9.937685067357704,0.8491318842156277,0.0,30.0,,, roar,breast_cancer,linear,30.0,15.739486551285166,11.493059453698454,0.8864064783030617,0.0,30.0,,, -roar,breast_cancer,linear,30.0,15.269408822886342,10.763906118835889,0.8675779687949952,0.0,30.0,,, -roar,breast_cancer,linear,29.0,14.430499716369502,9.938021989590801,0.8600959622803224,0.0,30.0,,, 
-roar,boston_housing,linear,11.0,8.42288314102019,7.103838973593407,0.8714885698512165,0.0,11.0,0.43999999999999995,1.0,0.20509229999999973 -roar,boston_housing,linear,12.0,10.290152808681901,10.43472011904247,1.0566676573511253,0.0,12.0,,, +roar,breast_cancer,linear,30.0,15.269408822886342,10.763906118835887,0.8675779687949952,0.0,30.0,,, +roar,breast_cancer,linear,29.0,14.430499716369502,9.9380219895908,0.8600959622803224,0.0,30.0,,, +roar,boston_housing,linear,11.0,8.42288314102019,7.103838973593407,0.8714885698512165,0.0,11.0,0.4399999999999999,1.0,0.2050922999999997 +roar,boston_housing,linear,12.0,10.2901528086819,10.43472011904247,1.0566676573511251,0.0,12.0,,, roar,boston_housing,linear,12.0,11.124564173221437,12.06185503704698,1.153862599505494,0.0,12.0,,, -roar,boston_housing,linear,12.0,11.754756273996955,13.481781798027901,1.2174043665921714,0.0,12.0,,, -roar,boston_housing,linear,11.0,8.487735325674985,7.2130684100021325,0.8777667476115111,0.0,11.0,,, -roar,boston_housing,linear,12.0,9.630238564130142,9.003167798871797,0.9946703360833343,0.0,12.0,,, +roar,boston_housing,linear,12.0,11.754756273996955,13.4817817980279,1.2174043665921714,0.0,12.0,,, +roar,boston_housing,linear,11.0,8.487735325674985,7.213068410002132,0.8777667476115111,0.0,11.0,,, +roar,boston_housing,linear,12.0,9.630238564130142,9.003167798871797,0.9946703360833344,0.0,12.0,,, roar,boston_housing,linear,12.0,10.45135350677032,10.479898040589251,1.0693284107359828,0.0,12.0,,, -roar,boston_housing,linear,12.0,9.480605434904518,8.787099818607752,0.9835587124161373,0.0,12.0,,, -roar,boston_housing,linear,12.0,9.171005273011975,8.215278014423253,0.9512599227707381,0.0,12.0,,, -roar,boston_housing,linear,11.0,11.091221900347287,12.334272797155895,1.1652295128925483,0.0,11.0,,, +roar,boston_housing,linear,12.0,9.480605434904518,8.787099818607752,0.9835587124161372,0.0,12.0,,, +roar,boston_housing,linear,12.0,9.171005273011977,8.215278014423253,0.951259922770738,0.0,12.0,,, 
+roar,boston_housing,linear,11.0,11.091221900347287,12.334272797155895,1.1652295128925485,0.0,11.0,,, +rbr,twomoon,mlp,2.0,0.7286989127039623,0.2674033401335881,0.3951900744198769,0.0,1.0,1.0,1.0,4.879103590000001 +rbr,twomoon,mlp,2.0,0.6894799992967063,0.2397964433977159,0.3771830935407586,0.0,1.0,,, +rbr,twomoon,mlp,2.0,0.1409640647837522,0.0189301471125527,0.13754436657573,0.0,1.0,,, +rbr,twomoon,mlp,2.0,0.3849629894626554,0.1410744506732003,0.3754790343940074,0.0,1.0,,, +rbr,twomoon,mlp,2.0,1.002957846445284,0.8269444367229775,0.9039601127519769,0.0,1.0,,, +rbr,twomoon,mlp,2.0,0.1446472630709976,0.0109513530407096,0.0879751120364685,0.0,1.0,,, +rbr,twomoon,mlp,2.0,0.2456580453177645,0.0308932669559418,0.1417938498564787,0.0,1.0,,, +rbr,twomoon,mlp,2.0,0.3707915824245534,0.0714714848971661,0.2223300984259902,0.0,1.0,,, +rbr,twomoon,mlp,2.0,0.373523153433866,0.127161158046786,0.3561743639721993,0.0,1.0,,, +rbr,twomoon,mlp,2.0,0.6861088172443294,0.265889329731744,0.4665790522798936,0.0,1.0,,, +rbr,compass,mlp,7.0,0.8117256915000708,0.1751537979559963,0.2588201761245727,5.0,5.0,0.4,1.0,45.98423136 +rbr,compass,mlp,7.0,1.000752511777376,0.4067629195629747,0.479181706905365,5.0,6.0,,, +rbr,compass,mlp,7.0,0.7570992925841558,0.396842765160404,0.6263000965118408,5.0,6.0,,, +rbr,compass,mlp,7.0,0.7593743661418557,0.1561166400465182,0.2451530694961547,5.0,5.0,,, +rbr,compass,mlp,7.0,2.095355091420444,1.0422519967563235,0.5292404294013977,5.0,4.0,,, +rbr,adult,mlp,,,,,,,,0.0,0.0905930000000001 +rbr,credit,mlp,20.0,0.965264297888805,0.0794397720991071,0.1038587912917137,5.0,20.0,0.0,1.0,59.347741940000006 +rbr,credit,mlp,20.0,0.566989452251398,0.0259825727896292,0.0628717467188835,5.0,20.0,,, +rbr,credit,mlp,20.0,1.1195734536322235,0.0959842938489572,0.1067807227373123,5.0,19.0,,, +rbr,credit,mlp,20.0,2.84648185800286,0.7871216235889688,0.3118891716003418,5.0,15.0,,, +rbr,credit,mlp,20.0,4.035248189802385,1.4515882305083423,0.3944755792617798,5.0,15.0,,, 
+rbr,german,mlp,7.0,0.4157270057639756,0.0748703424622665,0.263138996789961,5.0,6.0,0.3199999999999999,1.0,18.5585004 +rbr,german,mlp,7.0,0.915372353215671,0.5424925760678092,0.7276125465120588,5.0,6.0,,, +rbr,german,mlp,7.0,0.5011241306168176,0.0910087649095372,0.2593848833329897,5.0,5.0,,, +rbr,german,mlp,7.0,0.4318845116361215,0.081656268455909,0.2743761845484836,5.0,6.0,,, +rbr,german,mlp,7.0,0.8469910197915712,0.2895495604187955,0.5005260705947876,5.0,6.0,,, +rbr,boston_housing,mlp,13.0,1.4352224063896322,0.3736564812519745,0.4282479539830633,0.0,10.0,0.3199999999999999,1.0,12.221076720000005 +rbr,boston_housing,mlp,13.0,1.3238735117244025,0.2947727664453581,0.3508007670036611,0.0,9.0,,, +rbr,boston_housing,mlp,13.0,1.3824252398802377,0.3149165118214244,0.4218325881247825,0.0,10.0,,, +rbr,boston_housing,mlp,13.0,0.7209683333367833,0.0688352053038206,0.153095543384552,0.0,12.0,,, +rbr,boston_housing,mlp,13.0,1.0697691839818662,0.1521799871864383,0.2351901829242706,0.0,10.0,,, +rbr,breast_cancer,mlp,,,,,,,,0.0,0.006507019999999741 diff --git a/experiments/run_experiment.py b/experiments/run_experiment.py index 920b189..fbc7152 100644 --- a/experiments/run_experiment.py +++ b/experiments/run_experiment.py @@ -170,6 +170,11 @@ def initialize_recourse_method( return Probe(mlmodel, hyperparams) elif method == "roar": return Roar(mlmodel, hyperparams) + elif method == "rbr": + hyperparams["train_data"] = data.df_train.drop(columns=["y"], axis=1) + dev = torch.device("cuda" if torch.cuda.is_available() else "cpu") + hyperparams["device"] = dev + return RBR(mlmodel, hyperparams) else: raise ValueError("Recourse method not known") @@ -199,7 +204,7 @@ def create_parser(): -r, --recourse_method: Specifies recourse methods for the experiment. Default: ["dice", "cchvae", "cem", "cem_vae", "clue", "cruds", "face_knn", "face_epsilon", "gs", "mace", "revise", "wachter"]. 
Choices: ["dice", "ar", "causal_recourse", "cchvae", "cem", "cem_vae", "claproar", "clue", "cruds", "face_knn", "face_epsilon", "feature_tweak", - "focus", "gravitational", "greedy", "gs", "mace", "revise", "wachter", "cfvae", "roar", "probe"]. + "focus", "gravitational", "greedy", "gs", "mace", "revise", "wachter", "cfvae", "roar", "probe", "rbr"]. -n, --number_of_samples: Specifies the number of instances per dataset. Default: 20. -s, --train_split: Specifies the split of the available data used for training. @@ -292,6 +297,7 @@ def create_parser(): "cfvae", "probe", "roar", + "rbr", ], help="Recourse methods for experiment", ) @@ -374,6 +380,7 @@ def create_parser(): "cfvae", "probe", "roar", + "rbr", ] sklearn_methods = ["feature_tweak", "focus", "mace"] diff --git a/methods/__init__.py b/methods/__init__.py index 06b1055..e5cac55 100644 --- a/methods/__init__.py +++ b/methods/__init__.py @@ -8,6 +8,7 @@ CRUD, FOCUS, MACE, + RBR, ActionableRecourse, CausalRecourse, ClaPROAR, diff --git a/methods/catalog/__init__.py b/methods/catalog/__init__.py index 07bee46..45b4ea1 100644 --- a/methods/catalog/__init__.py +++ b/methods/catalog/__init__.py @@ -16,6 +16,7 @@ from .growing_spheres import GrowingSpheres from .mace import MACE from .probe import Probe +from .rbr import RBR from .revise import Revise from .roar import Roar from .wachter import Wachter diff --git a/methods/catalog/probe/library/probe.py b/methods/catalog/probe/library/probe.py index b261d86..3ecea4c 100644 --- a/methods/catalog/probe/library/probe.py +++ b/methods/catalog/probe/library/probe.py @@ -71,14 +71,15 @@ def perturb_sample(x, n_samples, sigma2): return X + eps -def reparametrization_trick(mu, sigma2, n_samples): +def reparametrization_trick(mu, sigma2, device, n_samples): # var = torch.eye(mu.shape[1]) * sigma2 - std = torch.sqrt(sigma2) + std = torch.sqrt(sigma2).to(device) epsilon = MultivariateNormal( loc=torch.zeros(mu.shape[1]), covariance_matrix=torch.eye(mu.shape[1]) ) epsilon = 
epsilon.sample((n_samples,)) # standard Gaussian random noise - ones = torch.ones_like(epsilon) + epsilon = epsilon.to(device) + ones = torch.ones_like(epsilon).to(device) random_samples = mu.reshape(-1) * ones + std * epsilon return random_samples @@ -176,7 +177,9 @@ def probe_recourse( costs = [] ces = [] - random_samples = reparametrization_trick(x_new, noise_variance, n_samples=1000) + random_samples = reparametrization_trick( + x_new, noise_variance, device, n_samples=1000 + ) invalidation_rate = compute_invalidation_rate(torch_model, random_samples) while (f_x_new <= DECISION_THRESHOLD) or ( @@ -226,7 +229,7 @@ def probe_recourse( optimizer.step() random_samples = reparametrization_trick( - x_new, noise_variance, n_samples=10000 + x_new, noise_variance, device, n_samples=10000 ) invalidation_rate = compute_invalidation_rate(torch_model, random_samples) diff --git a/methods/catalog/rbr/__init__.py b/methods/catalog/rbr/__init__.py new file mode 100644 index 0000000..4bf61f9 --- /dev/null +++ b/methods/catalog/rbr/__init__.py @@ -0,0 +1,3 @@ +# flake8: noqa + +from .model import RBR diff --git a/methods/catalog/rbr/library/__init__.py b/methods/catalog/rbr/library/__init__.py new file mode 100644 index 0000000..cffae73 --- /dev/null +++ b/methods/catalog/rbr/library/__init__.py @@ -0,0 +1,3 @@ +# flake8: noqa + +from .rbr_loss import robust_bayesian_recourse diff --git a/methods/catalog/rbr/library/rbr_loss.py b/methods/catalog/rbr/library/rbr_loss.py new file mode 100644 index 0000000..f75ac67 --- /dev/null +++ b/methods/catalog/rbr/library/rbr_loss.py @@ -0,0 +1,594 @@ +# methods/catalog/rbr/library.py +import math +from typing import Any, Optional, Sequence + +import numpy as np +import torch +from sklearn.utils import check_random_state + +""" +This code is largely ported over from the original authors codebase. +Light restructuring and modifications have been made in order to make it compatible with CARLAs structure. 
+ +Original code can be found at: https://github.com/VinAIResearch/robust-bayesian-recourse +""" + +# ---------- low-level helpers & projections ---------- + + +@torch.no_grad() +def l2_projection(x: torch.Tensor, radius: float) -> torch.Tensor: + """ + Euclidean projection onto an L2-ball for last axis. + x: shape (..., d) + radius: scalar + """ + norm = torch.linalg.norm(x, ord=2, axis=-1) + # avoid divide by zero + denom = torch.max(norm, torch.tensor(radius, device=x.device)) + scale = (radius / denom).unsqueeze(1) + return scale * x + + +# In the original code but never seemed to be used +def reconstruct_encoding_constraints(x: torch.Tensor, cat_pos: Optional[Sequence[int]]): + x_enc = x.clone() + for pos in cat_pos: + x_enc.data[pos] = torch.clamp(torch.round(x_enc[pos]), 0, 1) + return x_enc + + +# ---------- likelihood modules ---------- + + +class OptimisticLikelihood(torch.nn.Module): + def __init__( + self, + x_dim: torch.Tensor, + epsilon_op: torch.Tensor, + sigma: torch.Tensor, + device: torch.device, + ): + super().__init__() + self.device = device + self.x_dim = x_dim.to(self.device) + self.epsilon_op = epsilon_op.to(self.device) + self.sigma = sigma.to(self.device) + + @torch.no_grad() + def projection(self, v: torch.Tensor) -> torch.Tensor: + v = v.clone() + v = torch.max(v, torch.tensor(0, device=self.device)) + result = l2_projection(v, float(self.epsilon_op)) + return result.to(self.device) + + def _forward(self, v: torch.Tensor, x: torch.Tensor, x_feas: torch.Tensor): + c = torch.linalg.norm(x - x_feas, axis=-1) + d = v[..., 1] + self.sigma + p = self.x_dim + L = ( + torch.log(d) + + (c - v[..., 0]) ** 2 / (2 * d**2) + + (p - 1) * torch.log(self.sigma) + ) + return L + + def forward(self, v: torch.Tensor, x: torch.Tensor, x_feas: torch.Tensor): + c = torch.linalg.norm(x - x_feas, axis=-1) + d = v[..., 1] + self.sigma + p = self.x_dim + + L = ( + torch.log(d) + + (c - v[..., 0]) ** 2 / (2 * d**2) + + (p - 1) * torch.log(self.sigma) + ) + + 
v_grad = torch.zeros_like(v, device=self.device) + v_grad[..., 0] = -(c - v[..., 0]) / d**2 + v_grad[..., 1] = 1 / d - (c - v[..., 0]) ** 2 / d**3 + + return L, v_grad + + def optimize( + self, + x: torch.Tensor, + x_feas: torch.Tensor, + max_iter: int = int(1e3), + verbose: bool = False, + ): + v = torch.zeros([x.shape[0], 2], device=self.device) + lr = 1 / torch.sqrt(torch.tensor(max_iter, device=self.device).float()) + + loss_diff = 1.0 + min_loss = float("inf") + num_stable_iter = 0 + max_stable_iter = 10 + + for t in range(max_iter): + F, grad = self.forward(v, x, x_feas) + v = self.projection(v - lr * grad) + + loss_sum = F.sum().data.item() + loss_diff = min_loss - loss_sum + if loss_diff <= 1e-10: + num_stable_iter += 1 + if num_stable_iter >= max_stable_iter: + break + else: + num_stable_iter = 0 + min_loss = min(min_loss, loss_sum) + if verbose and (t % 200 == 0): + print(f"[Optimistic] iter {t} loss {loss_sum:.6f}") + return v + + +class PessimisticLikelihood(torch.nn.Module): + def __init__( + self, + x_dim: torch.Tensor, + epsilon_pe: torch.Tensor, + sigma: torch.Tensor, + device: torch.device, + ): + super().__init__() + self.device = device + self.epsilon_pe = epsilon_pe.to(self.device) + self.sigma = sigma.to(self.device) + self.x_dim = x_dim.to(self.device) + + @torch.no_grad() + def projection(self, u: torch.Tensor) -> torch.Tensor: + u = u.clone() + u = torch.max(u, torch.tensor(0, device=self.device)) + result = l2_projection(u, float(self.epsilon_pe) / math.sqrt(float(self.x_dim))) + return result.to(self.device) + + def _forward( + self, u: torch.Tensor, x: torch.Tensor, x_feas: torch.Tensor, zeta: float = 1e-6 + ): + c = torch.linalg.norm(x - x_feas, axis=-1) + d = u[..., 1] + self.sigma + p = self.x_dim + # p = p.float() + sqrt_p = torch.sqrt(p.float()) + + inside = (zeta + self.epsilon_pe**2 - p * u[..., 0] ** 2 - u[..., 1] ** 2) / ( + p - 1 + ) + # f = torch.sqrt(torch.maximum(inside, torch.tensor(1e-12, device=self.device))) + f = 
torch.sqrt(inside) + + L = ( + -torch.log(d) + - (c + sqrt_p * u[..., 0]) ** 2 / (2 * d**2) + - (p - 1) * torch.log(f + self.sigma) + ) + return L + + def forward( + self, u: torch.Tensor, x: torch.Tensor, x_feas: torch.Tensor, zeta: float = 1e-6 + ): + c = torch.linalg.norm(x - x_feas, axis=-1) + d = u[..., 1] + self.sigma + p = self.x_dim + + # p = p.float() # issue with support with int tensors when taking sqrt? + + sqrt_p = torch.sqrt(p.float()) + inside = (zeta + self.epsilon_pe**2 - p * u[..., 0] ** 2 - u[..., 1] ** 2) / ( + p - 1 + ) + # f = torch.sqrt(torch.maximum(inside, torch.tensor(1e-12, device=self.device))) + f = torch.sqrt(inside) + + L = ( + -torch.log(d) + - (c + sqrt_p * u[..., 0]) ** 2 / (2 * d**2) + - (p - 1) * torch.log(f + self.sigma) + ) + + u_grad = torch.zeros_like(u, device=self.device) + u_grad[..., 0] = -sqrt_p * (c + sqrt_p * u[..., 0]) / d**2 - ( + p * u[..., 0] + ) / (f * (f + self.sigma)) + u_grad[..., 1] = ( + -1 / d + + (c + sqrt_p * u[..., 0]) ** 2 / d**3 + + u[..., 1] / (f * (f + self.sigma)) + ) + + return L, u_grad + + def optimize( + self, + x: torch.Tensor, + x_feas: torch.Tensor, + max_iter: int = int(1e3), + verbose: bool = False, + ): + u = torch.zeros([x.shape[0], 2], device=self.device) + lr = 1.0 / torch.sqrt(torch.tensor(max_iter, device=self.device).float()) + + loss_diff = 1.0 + min_loss = float("inf") + num_stable_iter = 0 + max_stable_iter = 10 + + for t in range(max_iter): + F, grad = self.forward(u, x, x_feas) + u = self.projection(u - lr * grad) + + loss_sum = F.sum().data.item() + loss_diff = min_loss - loss_sum + + if loss_diff <= 1e-10: + num_stable_iter += 1 + if num_stable_iter >= max_stable_iter: + break + else: + num_stable_iter = 0 + min_loss = min(min_loss, loss_sum) + if verbose and (t % 200 == 0): + print(f"[Pessimistic] iter {t} loss {loss_sum:.6f}") + return u + + +# ---------- RBRLoss wrapper ---------- + + +class RBRLoss(torch.nn.Module): + def __init__( + self, + X_feas: torch.Tensor, + 
X_feas_pos: torch.Tensor, + X_feas_neg: torch.Tensor, + epsilon_op: float, + epsilon_pe: float, + sigma: float, + device: torch.device, + verbose: bool = False, + ): + super(RBRLoss, self).__init__() + self.device = device + self.verbose = verbose + + self.X_feas = X_feas.to(self.device) + self.X_feas_pos = X_feas_pos.to(self.device) + self.X_feas_neg = X_feas_neg.to(self.device) + + self.epsilon_op = torch.tensor(epsilon_op, device=self.device) + self.epsilon_pe = torch.tensor(epsilon_pe, device=self.device) + self.sigma = torch.tensor(sigma, device=self.device) + self.x_dim = torch.tensor(X_feas.shape[-1], device=self.device) + + # print("This is epsilon op: ", self.epsilon_op) + # print("This is epsilon pe: ", self.epsilon_pe) + + self.op_likelihood = OptimisticLikelihood( + self.x_dim, self.epsilon_op, self.sigma, self.device + ) + self.pe_likelihood = PessimisticLikelihood( + self.x_dim, self.epsilon_pe, self.sigma, self.device + ) + + def forward(self, x: torch.Tensor, verbose: bool = False): + if verbose or self.verbose: + print(f"N_neg: {self.X_feas_neg.shape}, N_pos: {self.X_feas_pos.shape}") + + # pessimistic part + # if self.X_feas_pos.shape[0] > 0: + u = self.pe_likelihood.optimize( + x.detach().clone().expand([self.X_feas_pos.shape[0], -1]), + self.X_feas_pos, + verbose=self.verbose, + ) + F_pe = self.pe_likelihood._forward( + u, x.expand([self.X_feas_pos.shape[0], -1]), self.X_feas_pos + ) + denom = torch.logsumexp(F_pe, -1) + # else: + # denom = torch.tensor(0.0, device=self.device) + + # optimistic part + # if self.X_feas_neg.shape[0] > 0: + v = self.op_likelihood.optimize( + x.detach().clone().expand([self.X_feas_neg.shape[0], -1]), + self.X_feas_neg, + verbose=self.verbose, + ) + F_op = self.op_likelihood._forward( + v, x.expand([self.X_feas_neg.shape[0], -1]), self.X_feas_neg + ) + numer = torch.logsumexp(-F_op, -1) + # else: + # numer = torch.tensor(0.0, device=self.device) + + result = numer - denom + return result, denom, numer + + +# 
---------- high-level RBR generator (callable used by CARLA wrapper) ---------- + + +def robust_bayesian_recourse( + raw_model: Any, + x0: np.ndarray, + cat_features_indices: Optional[Sequence[int]] = None, + train_data: Optional[np.ndarray] = None, + num_samples: int = 200, + perturb_radius: float = 0.2, + delta_plus: float = 1.0, + sigma: float = 1.0, + epsilon_op: float = 0.5, + epsilon_pe: float = 1.0, + max_iter: int = 1000, + dev: str = "cpu", + random_state: Optional[int] = None, + verbose: bool = False, +) -> np.ndarray: + + # helper to call raw_model.predict consistently + def predict_fn_np(x): + # raw_model might accept (n,d) and return probs or labels + # preds_tensor = raw_model.predict(x) + + # # print(f"This is the pred_tensor {preds_tensor}") + + # if preds_tensor.ndim == 1: + # preds_tensor = preds_tensor.unsqueeze(0) + + # preds = preds_tensor.cpu().detach().numpy() + # # print(f"The prediction is {preds} before numpy array") + # preds = np.asarray(preds) + + # # convert to single-label 0/1 if probabilities provided + # if preds.ndim == 2 and preds.shape[1] > 1: + # return preds.argmax(axis=1) + # if preds.dtype.kind in ("f",): + # return (preds >= 0.5).astype(int).squeeze() + # preds = preds.astype(int) + + # if x.ndim == 1: + # return preds.squeeze() + # return preds + # return torch.tensor(raw_model.predict(x.cpu().detach())) + # Ensure 2D shape: (batch_size, outputs) + preds_tensor = raw_model.predict(x) + + # Ensure 2D batch shape + if preds_tensor.ndim == 1: + preds_tensor = preds_tensor.unsqueeze(0) + + # Move to CPU numpy + preds = preds_tensor.detach().cpu().numpy() + + # ---- CASE 1: Softmax output (shape: [N, 2+]) ---- + if preds.ndim == 2 and preds.shape[1] > 1: + preds = preds.argmax(axis=1) + + # ---- CASE 2: Sigmoid or 1D probability (shape: [N]) ---- + elif preds.dtype.kind == "f": + + # If shape is Nx1 → squeeze to N + if preds.ndim == 2 and preds.shape[1] == 1: + preds = preds.squeeze() + + # If probabilities → threshold + preds = 
(preds >= 0.5).astype(int) + + # ---- CASE 3: Raw logits (single value per instance) ---- + elif preds.ndim == 2 and preds.shape[1] == 1: + probs = 1 / (1 + np.exp(-preds)) + preds = (probs >= 0.5).astype(int) + + # ---- Fallback ---- + else: + preds = preds.astype(int) + + # --- FINAL STEP: return scalar if batch size = 1 --- + if preds.size == 1: + # print("This is the prediction ", int(preds.item())) + return int(preds.item()) + + # print("This is the prediction ", preds) + return preds + + # find boundary point between x0 and nearest opposite-label train point + def dist(a: torch.Tensor, b: torch.Tensor): + return torch.linalg.norm(a - b, ord=1, axis=-1) + + # feasible set sampled around x_b + def uniform_ball(x: torch.Tensor, r: float, n: int, rng_state): + rng_local = check_random_state(rng_state) + # print(f"this is x: {x}") + d = x.shape[0] + # print(d) + V = rng_local.randn(n, d) + V = V / np.linalg.norm(V, axis=1).reshape(-1, 1) + V = V * (rng_local.random(n) ** (1.0 / d)).reshape(-1, 1) + V = V * r + x.cpu().numpy() + return torch.from_numpy(V).float().to(dev) + + def simplex_projection(x, delta): + """ + Euclidean projection on a positive simplex + """ + (p,) = x.shape + if torch.linalg.norm(x, ord=1) == delta and torch.all(x >= 0): + return x + u, _ = torch.sort(x, descending=True) + cssv = torch.cumsum(u, 0) + rho = torch.nonzero(u * torch.arange(1, p + 1).to(dev) > (cssv - delta))[-1, 0] + theta = (cssv[rho] - delta) / (rho + 1.0) + w = torch.clip(x - theta, min=0) + return w + + def projection(x, delta): + """ + Euclidean projection on an L1-ball + """ + x_abs = torch.abs(x) + if x_abs.sum() <= delta: + return x + + proj = simplex_projection(x_abs, delta=delta) + proj *= torch.sign(x) + + return proj + + # device selection + # if "cuda" in device and torch.cuda.is_available(): + # dev = torch.device(device) + # else: + # dev = torch.device("cpu") + + rng = check_random_state(random_state) + + if train_data is None: + raise ValueError("train_data 
must be provided to robust_bayesian_recourse") + + # ------- Implementation of fit_instance() ------------------ + x0_t = torch.from_numpy(x0.copy()).float().to(dev) + print(f"x0_t: {x0_t}") + + train_t = torch.tensor(train_data).float().to(dev) + # print(f"train_t: {train_t}") + + # training label vector + train_label = torch.tensor(predict_fn_np(train_t)).to(dev) + # print(f"train_label: {train_label}") + + # -------- Implementation of find_x_boundary() --------------- + # find nearest opposite label examples and search along line for boundary + x_label = torch.tensor(predict_fn_np(x0_t.clone()), device=dev) + print(f"x_label: {x_label}") + + dists = dist(train_t, x0_t) + order = torch.argsort(dists) + # print(f"order: {order}") + candidates = train_t[order[train_label[order] == (1 - x_label)]][:1000] + # print(f"candidates: {candidates}") + best_x_b = None + best_dist = torch.tensor(float("inf"), device=dev) + + for x_c in candidates: + lambdas = torch.linspace(0, 1, 100, device=dev) + for lam in lambdas: + x_b = (1 - lam) * x0_t + lam * x_c + label = predict_fn_np(x_b) + if label == 1 - x_label: + curdist = dist(x0_t, x_b) + if curdist < best_dist: + best_x_b = x_b.detach().clone() + best_dist = curdist.detach().clone() + break + # ------------------ end of find_x_boundary() -------------------- + + if best_x_b is None: + # fallback: nearest opposite neighbor directly + opp_idx = (train_label == (1 - x_label)).nonzero(as_tuple=False) + if opp_idx.shape[0] == 0: + # can't find opposite label in train set -> return original + return x0.copy() + first_idx = opp_idx[0, 0].item() + best_x_b = train_t[first_idx].detach().clone() + best_dist = dist(x0_t, best_x_b) + + delta = best_dist + delta_plus + + print(f"best_x_b: {best_x_b}, delta: {delta}") + + X_feas = uniform_ball(best_x_b, perturb_radius, num_samples, rng).float().to(dev) + + # apply categorical clamping if requested + # if cat_features_indices: + # for i in range(X_feas.shape[0]): + # X_feas[i] = 
reconstruct_encoding_constraints(X_feas[i], cat_features_indices) + + y_feas = predict_fn_np(X_feas) + + # print(f"X_feas: {X_feas}") + # print(f"y_feas: {y_feas}") + + if (y_feas == 1).any(): + X_feas_pos = X_feas[y_feas == 1].reshape([int((y_feas == 1).sum().item()), -1]) + else: + X_feas_pos = torch.empty((0, X_feas.shape[1]), device=dev) + + # print(f"X_feas_pos: {X_feas_pos}") + + if (y_feas == 0).any(): + X_feas_neg = X_feas[y_feas == 0].reshape([int((y_feas == 0).sum().item()), -1]) + else: + X_feas_neg = torch.empty((0, X_feas.shape[1]), device=dev) + + # print(f"[Debug] X_feas_pos shape: {X_feas_pos.shape}, X_feas_neg shape: {X_feas_neg.shape}") + # print(f"X_feas_neg: {X_feas_neg}") + # torch.autograd.set_detect_anomaly(True) # try to catch NaNs + # build loss wrapper + loss_fn = RBRLoss( + X_feas, + X_feas_pos, + X_feas_neg, + epsilon_op, + epsilon_pe, + sigma, + device=dev, + verbose=verbose, + ) + + # ---------------- Start of optimize() ---------------- + # optimization loop - same basic behaviour as original code. 
+ x_t = best_x_b.detach().clone() + x_t.requires_grad_(True) + + min_loss = float("inf") + num_stable_iter = 0 + max_stable_iter = 10 + step = 1.0 / math.sqrt(1e3) + + for t in range(max_iter): + if x_t.grad is not None: + x_t.grad.data.zero_() + + F, denom, numer = loss_fn(x_t) + F_sum = F.sum() + F_sum.backward() + # if we left L1-ball, break + if torch.ge(torch.linalg.norm((x_t.detach() - x0_t), ord=1), float(delta)): + break + + with torch.no_grad(): + x_new = x_t - step * x_t.grad + # x_new = (lambda a, b: (a if torch.abs(a).sum() <= b else (l2_projection(a.unsqueeze(0), b).squeeze())))( + # x_new - x0_t, float(delta) + # ) + + x_new = projection(x_new - x0_t, float(delta)) + x0_t + + # print(f"x_new: {x_new}") + # enforce categorical encodings rounding/clamping + # if cat_features_indices: + # x_new = reconstruct_encoding_constraints(x_new, cat_features_indices) + + for i, e in enumerate(x_new.data): + x_t.data[i] = e + + loss_sum = F_sum.item() + loss_diff = min_loss - loss_sum + + if loss_diff <= 1e-10: + num_stable_iter += 1 + if num_stable_iter >= max_stable_iter: + break + else: + num_stable_iter = 0 + min_loss = min(min_loss, loss_sum) + + cf = x_t.detach().cpu().numpy().squeeze() + # print(f"Final counterfactual cf: {cf}") + + # ----------------------------- end of optimize() ----------------------- + + return cf diff --git a/methods/catalog/rbr/library/reproduce/__init__.py b/methods/catalog/rbr/library/reproduce/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/methods/catalog/rbr/library/reproduce/data/german.csv b/methods/catalog/rbr/library/reproduce/data/german.csv new file mode 100644 index 0000000..d1d2d63 --- /dev/null +++ b/methods/catalog/rbr/library/reproduce/data/german.csv @@ -0,0 +1,1001 @@ 
+status,duration,credit_history,purpose,amount,savings,employment_duration,installment_rate,personal_status_sex,other_debtors,present_residence,property,age,other_installment_plans,housing,number_credits,job,people_liable,telephone,foreign_worker,credit_risk +2,6,1,4,1169,1,5,4,4,1,4,4,67,3,3,2,3,1,2,1,1 +3,48,3,4,5951,2,3,2,2,1,2,4,22,3,3,1,3,1,1,1,0 +1,12,1,7,2096,2,4,2,4,1,3,4,49,3,3,1,2,2,1,1,1 +2,42,3,3,7882,2,4,2,4,3,4,3,45,3,1,1,3,2,1,1,1 +2,24,0,1,4870,2,3,3,4,1,4,1,53,3,1,2,3,2,1,1,0 +1,36,3,7,9055,1,3,2,4,1,4,1,35,3,1,1,2,2,2,1,1 +1,24,3,3,2835,4,5,3,4,1,4,3,53,3,3,1,3,1,1,1,1 +3,36,3,2,6948,2,3,2,4,1,2,2,35,3,2,1,4,1,2,1,1 +1,12,3,4,3059,5,4,2,1,1,4,4,61,3,3,1,2,1,1,1,1 +3,30,1,1,5234,2,1,4,3,1,2,2,28,3,3,2,4,1,1,1,0 +3,12,3,1,1295,2,2,3,2,1,1,2,25,3,2,1,3,1,1,1,0 +2,48,3,10,4308,2,2,3,2,1,4,3,24,3,2,1,3,1,1,1,0 +3,12,3,4,1567,2,3,1,2,1,1,2,22,3,3,1,3,1,2,1,1 +2,24,1,1,1199,2,5,4,4,1,4,2,60,3,3,2,2,1,1,1,0 +2,15,3,1,1403,2,3,2,2,1,4,2,28,3,2,1,3,1,1,1,1 +2,24,3,4,1282,3,3,4,2,1,2,2,32,3,3,1,2,1,1,1,0 +1,24,1,4,2424,1,5,4,4,1,4,3,53,3,3,2,3,1,1,1,1 +2,30,2,10,8072,1,2,2,4,1,3,2,25,1,3,3,3,1,1,1,1 +3,24,3,2,12579,2,5,4,2,1,2,1,44,3,1,1,4,1,2,1,0 +1,24,3,4,3430,4,5,3,4,1,2,2,31,3,3,1,3,2,2,1,1 +1,9,1,1,2134,2,3,4,4,1,4,2,48,3,3,3,3,1,2,1,1 +2,6,3,4,2647,4,3,2,4,1,3,4,44,3,2,1,3,2,1,1,1 +2,10,1,1,2241,2,2,1,4,1,3,4,48,3,2,2,2,2,1,2,1 +3,12,1,2,1804,3,2,3,4,1,4,3,44,3,3,1,3,1,1,1,1 +1,10,1,3,2069,1,3,2,3,1,1,2,26,3,3,2,3,1,1,2,1 +2,6,3,3,1374,2,3,1,4,1,2,4,36,1,3,1,2,1,2,1,1 +1,6,2,4,426,2,5,4,3,1,4,2,39,3,3,1,2,1,1,1,1 +4,12,4,4,409,5,3,3,2,1,3,4,42,3,2,2,3,1,1,1,1 +3,7,3,4,2415,2,3,3,4,3,2,4,34,3,3,1,3,1,1,1,1 +2,60,0,10,6836,2,5,3,4,1,4,1,63,3,3,2,3,1,2,1,0 +3,18,3,10,1913,5,2,3,3,1,3,4,36,1,3,1,3,1,2,1,1 +2,24,3,3,4020,2,3,2,4,1,2,2,27,2,3,1,3,1,1,1,1 +3,18,3,1,5866,3,3,2,4,1,2,2,30,3,3,2,3,1,2,1,1 +1,12,1,10,1264,1,5,4,4,1,4,1,57,3,2,1,2,1,1,1,1 +4,12,3,3,1474,2,2,4,2,1,1,3,33,1,3,1,4,1,2,1,1 +3,45,1,4,4746,2,2,4,4,1,2,3,25,3,3,2,2,1,1,1,0 
+1,48,1,7,6110,2,3,1,4,1,3,1,31,1,1,1,3,1,2,1,1 +4,18,3,4,2100,2,3,4,4,2,2,4,37,2,3,1,3,1,1,1,0 +4,10,3,5,1225,2,3,2,4,1,2,2,37,3,3,1,3,1,2,1,1 +3,9,3,4,458,2,3,4,4,1,3,4,24,3,3,1,3,1,1,1,1 +1,30,3,4,2333,4,5,4,4,1,2,2,30,1,3,1,4,1,1,1,1 +3,12,3,4,1158,4,3,3,1,1,1,2,26,3,3,1,3,1,2,1,1 +3,18,0,6,6204,2,3,2,4,1,4,4,44,3,3,1,2,2,2,1,1 +2,30,1,2,6187,3,4,1,3,1,4,2,24,3,2,2,3,1,1,1,1 +2,48,1,2,6143,2,5,4,2,1,4,1,58,2,1,2,2,1,1,1,0 +1,11,1,1,1393,2,2,4,2,1,4,2,35,3,3,2,4,1,1,1,1 +1,36,3,4,2299,4,5,4,4,1,4,2,39,3,3,1,3,1,1,1,1 +2,6,3,2,1352,4,1,1,2,1,2,3,23,3,2,1,1,1,2,1,1 +1,11,1,1,7228,2,3,1,4,1,4,3,39,3,3,2,2,1,1,1,1 +1,12,3,4,2073,3,3,4,2,2,2,4,28,3,3,1,3,1,1,1,1 +3,24,0,3,2333,1,2,4,4,1,2,3,29,1,3,1,2,1,1,1,1 +3,27,0,2,5965,2,5,1,4,1,2,2,30,3,3,2,4,1,2,1,1 +1,12,3,4,1262,2,3,3,4,1,2,2,25,3,3,1,3,1,1,1,1 +1,18,3,2,3378,1,3,2,4,1,1,3,31,3,3,1,3,1,2,1,1 +3,36,0,1,2225,2,5,4,4,1,4,1,57,1,1,2,3,1,2,1,0 +1,6,4,1,783,1,3,1,4,3,2,4,26,2,3,1,2,2,1,1,1 +3,12,3,4,6468,1,1,2,4,1,1,1,52,3,3,1,4,1,2,1,0 +1,36,1,4,9566,2,3,2,2,1,2,2,31,2,3,2,3,1,1,1,1 +4,18,3,1,1961,2,5,3,2,1,2,2,23,3,3,1,4,1,1,1,1 +2,36,1,3,6229,2,2,4,2,2,4,1,23,3,2,2,2,1,2,1,0 +3,9,3,10,1391,2,3,2,3,1,1,4,27,1,3,1,3,1,2,1,1 +3,15,1,4,1537,1,5,4,4,3,4,4,50,3,3,2,3,1,2,1,1 +3,36,2,10,1953,2,5,4,4,1,4,1,61,3,1,1,4,1,2,1,0 +3,48,2,10,14421,2,3,2,4,1,2,2,25,3,3,1,3,1,2,1,0 +1,24,3,4,3181,2,2,4,2,1,4,3,26,3,3,1,3,1,2,1,1 +1,27,3,6,5190,1,5,4,4,1,4,3,48,3,3,4,3,2,2,1,1 +1,12,3,4,2171,2,2,2,2,1,2,2,29,1,3,1,3,1,1,1,1 +3,12,3,1,1007,5,3,4,3,1,1,4,22,3,3,1,3,1,1,1,1 +1,36,3,7,1819,2,3,4,4,1,4,1,37,2,1,1,3,1,2,1,0 +1,36,3,4,2394,1,3,4,2,1,4,2,25,3,3,1,3,1,1,1,1 +1,36,3,2,8133,2,3,1,2,1,2,3,30,1,3,1,3,1,1,1,1 +1,7,1,4,730,1,5,4,4,1,2,3,46,3,2,2,2,1,2,1,1 +2,8,1,0,1164,2,5,3,4,1,4,1,51,1,1,2,4,2,2,1,1 +3,42,1,10,5954,2,4,2,2,1,1,4,41,1,3,2,2,1,1,1,1 +2,36,3,7,1977,1,5,4,4,1,4,1,40,3,3,1,4,1,2,1,0 +2,12,1,2,1526,2,5,4,4,1,4,1,66,3,1,2,4,1,1,1,1 +2,42,3,4,3965,2,2,4,4,1,3,2,34,3,3,1,3,1,1,1,0 
+3,11,0,4,4771,2,4,2,4,1,4,3,51,3,3,1,3,1,1,1,1 +1,54,2,2,9436,1,3,2,4,1,2,3,39,3,3,1,2,2,1,1,1 +3,30,3,3,3832,2,2,2,3,1,1,3,22,3,3,1,3,1,1,1,1 +1,24,3,4,5943,1,2,1,2,1,1,2,44,3,3,2,3,1,2,1,0 +1,15,3,4,1213,4,5,4,4,1,3,3,47,2,3,1,3,1,2,1,1 +1,18,3,10,1568,3,3,3,2,1,4,3,24,3,2,1,2,1,1,1,1 +2,24,3,0,1755,2,5,4,2,3,4,4,58,3,3,1,2,1,2,1,1 +2,10,3,4,2315,2,5,3,4,1,4,4,52,3,3,1,2,1,1,1,1 +1,12,1,10,1412,2,3,4,2,3,2,4,29,3,3,2,4,1,2,1,1 +3,18,1,3,1295,2,2,4,2,1,1,3,27,3,3,2,3,1,1,1,1 +3,36,3,7,12612,3,3,1,4,1,4,1,47,3,1,1,3,2,2,1,0 +2,18,3,1,2249,3,4,4,4,1,3,2,30,3,3,1,4,2,2,1,1 +2,12,2,6,1108,2,4,4,4,1,3,4,28,3,3,2,3,1,1,1,0 +1,12,1,4,618,2,5,4,4,1,4,4,56,3,3,1,3,1,1,1,1 +2,12,1,2,1409,2,5,4,4,1,3,4,54,3,3,1,3,1,1,1,1 +1,12,1,4,797,1,5,4,2,1,3,3,33,1,3,1,2,2,1,1,0 +4,24,1,3,3617,1,5,4,4,2,4,1,20,3,2,2,3,1,1,1,1 +3,12,3,1,1318,5,5,4,4,1,4,4,54,3,3,1,3,1,2,1,1 +3,54,2,10,15945,2,2,3,4,1,4,1,58,3,2,1,3,1,2,1,0 +1,12,1,7,2012,1,4,4,2,1,2,2,61,3,3,1,3,1,1,1,1 +3,18,3,10,2622,3,3,4,4,1,4,2,34,3,3,1,3,1,1,1,1 +3,36,1,4,2337,2,5,4,4,1,4,4,36,3,3,1,3,1,1,1,1 +3,20,0,2,7057,1,4,3,4,1,4,3,36,1,2,2,4,2,2,1,1 +1,24,3,1,1469,3,5,4,3,1,4,4,41,3,2,1,2,1,1,1,1 +3,36,3,4,2323,2,4,4,4,1,4,2,24,3,2,1,3,1,1,1,1 +1,6,0,4,932,2,3,3,2,1,2,4,24,3,3,1,3,1,1,1,1 +3,9,1,3,1919,2,4,4,4,1,3,2,35,3,2,1,3,1,2,1,1 +1,12,3,2,2445,1,2,2,3,1,4,2,26,3,2,1,3,1,2,1,1 +3,24,1,0,11938,2,3,2,4,2,3,2,39,3,3,2,4,2,2,1,0 +1,18,4,1,6458,2,5,2,4,1,4,1,39,1,3,2,4,2,2,1,0 +3,12,3,1,6078,2,4,2,4,1,2,2,32,3,3,1,3,1,1,1,1 +2,24,3,3,7721,1,2,1,2,1,2,3,30,3,3,1,3,1,2,2,1 +3,14,3,10,1410,4,5,1,3,1,2,4,35,3,3,1,3,1,2,1,1 +3,6,0,10,1449,3,5,1,1,1,2,2,31,1,3,2,3,2,1,1,1 +4,15,3,7,392,2,2,4,2,1,4,3,23,3,2,1,3,1,2,1,1 +3,18,3,1,6260,2,4,3,4,1,3,4,28,3,2,1,2,1,1,1,1 +1,36,1,1,7855,2,3,4,2,1,2,4,25,2,3,2,3,1,2,1,0 +2,12,3,4,1680,4,5,3,3,1,1,4,35,3,3,1,3,1,1,1,1 +1,48,1,4,3578,1,5,4,4,1,1,4,47,3,3,1,3,1,2,1,1 +2,42,3,4,7174,1,4,4,2,1,3,2,30,3,3,1,4,1,2,1,0 +2,10,1,3,2132,1,2,2,2,2,3,4,27,3,2,2,3,1,1,2,1 
+2,33,1,3,4281,4,3,1,2,1,4,2,23,3,3,2,3,1,1,1,0 +3,12,1,1,2366,4,4,3,1,1,3,2,36,3,3,1,4,1,2,1,1 +2,21,3,4,1835,2,3,3,2,1,2,4,25,3,3,2,3,1,2,1,0 +1,24,1,2,3868,2,5,4,2,1,2,2,41,3,2,2,4,1,2,1,1 +1,12,3,3,1768,2,3,3,4,1,2,4,24,3,2,1,2,1,1,1,1 +4,10,1,1,781,2,5,4,4,1,4,1,63,3,1,2,3,1,2,1,1 +3,18,3,3,1924,1,2,4,2,1,3,4,27,3,2,1,3,1,1,1,0 +2,12,1,1,2121,2,3,4,4,1,2,3,30,3,3,2,3,1,1,1,1 +2,12,3,4,701,2,3,4,3,1,2,4,40,3,3,1,2,1,1,1,1 +3,12,3,6,639,2,3,4,4,1,2,2,30,3,3,1,3,1,1,1,0 +3,12,1,2,1860,2,1,4,4,1,2,2,34,3,3,2,4,1,2,1,1 +2,12,1,1,3499,2,3,3,2,2,2,4,29,3,3,2,3,1,1,1,0 +3,48,3,1,8487,1,4,1,2,1,2,2,24,3,3,1,3,1,1,1,1 +2,36,0,7,6887,2,3,4,4,1,3,3,29,2,3,1,3,1,2,1,0 +1,15,3,3,2708,2,2,2,4,1,3,3,27,1,3,2,2,1,1,1,1 +1,18,3,3,1984,2,3,4,4,1,4,1,47,1,1,2,3,1,1,1,1 +1,60,3,4,10144,3,4,2,2,1,4,4,21,3,3,1,3,1,2,1,1 +1,12,1,4,1240,1,5,4,2,1,2,4,38,3,3,2,3,1,2,1,1 +1,27,0,2,8613,5,3,2,4,1,2,2,27,3,3,2,3,1,1,1,1 +3,12,3,4,766,4,3,4,4,1,3,4,66,3,3,1,2,1,1,1,0 +3,15,1,4,2728,1,4,4,4,3,2,4,35,1,3,3,3,1,2,1,1 +4,12,3,4,1881,2,3,2,2,1,2,2,44,3,2,1,2,1,2,1,1 +4,6,3,1,709,5,2,2,3,1,2,4,27,3,3,1,1,1,1,2,1 +3,36,3,4,4795,2,2,4,2,1,1,1,30,3,3,1,4,1,2,1,1 +2,27,3,4,3416,2,3,3,4,1,2,2,27,3,3,1,4,1,1,1,1 +2,18,3,3,2462,2,3,2,4,1,2,2,22,3,3,1,3,1,1,1,0 +1,21,1,3,2288,2,2,4,2,1,4,3,23,3,3,1,3,1,2,1,1 +3,48,4,10,3566,3,4,4,4,1,2,2,30,3,3,1,3,1,1,1,1 +2,6,1,1,860,2,5,1,2,1,4,1,39,3,3,2,3,1,2,1,1 +1,12,1,1,682,3,4,4,2,1,3,2,51,3,3,2,3,1,2,1,1 +2,36,1,3,5371,2,3,3,4,3,2,3,28,3,3,2,3,1,1,1,1 +1,18,1,4,1582,5,5,4,4,1,4,2,46,3,3,2,3,1,1,1,1 +1,6,3,4,1346,3,5,2,4,1,4,1,42,1,1,1,3,2,2,1,1 +1,10,3,4,1924,2,3,1,4,1,4,3,38,3,3,1,3,1,2,2,1 +4,36,3,4,5848,2,3,4,4,1,1,2,24,3,3,1,3,1,1,1,1 +3,24,1,2,7758,5,5,2,2,1,4,1,29,3,2,1,3,1,1,1,1 +3,24,0,10,6967,3,4,4,4,1,4,2,36,3,2,1,4,1,2,1,1 +2,12,3,3,1282,2,3,2,2,1,4,2,20,3,2,1,3,1,1,1,0 +2,9,1,6,1288,3,5,3,4,3,4,4,48,3,3,2,3,2,1,2,1 +2,12,4,9,339,2,5,4,3,1,1,2,45,1,3,1,2,1,1,1,1 +3,24,3,1,3512,3,4,2,4,1,3,2,38,1,3,2,3,1,2,1,1 
+1,6,1,4,1898,1,3,1,4,1,2,4,34,3,3,2,2,2,1,1,1 +1,24,1,4,2872,3,5,3,4,1,4,4,36,3,3,1,3,2,2,1,1 +1,18,1,1,1055,2,2,4,2,1,1,3,30,3,3,2,3,1,1,1,1 +1,15,3,5,1262,4,4,4,4,1,3,3,36,3,3,2,3,1,2,1,1 +3,10,3,1,7308,2,1,2,4,1,4,1,70,1,1,1,4,1,2,1,1 +1,36,3,1,909,4,5,4,4,1,4,3,36,3,3,1,3,1,1,1,1 +1,6,3,3,2978,4,3,1,4,1,2,2,32,3,3,1,3,1,2,1,1 +2,18,3,3,1131,2,1,4,2,1,2,2,33,3,3,1,3,1,1,1,0 +3,11,3,3,1577,5,2,4,2,1,1,4,20,3,3,1,3,1,1,1,1 +1,24,3,3,3972,2,4,2,2,1,4,3,25,3,2,1,3,1,2,1,1 +3,24,1,10,1935,2,5,4,1,1,4,4,31,3,3,2,3,1,2,1,0 +2,15,2,1,950,2,5,4,4,1,3,2,33,3,2,2,3,2,1,1,0 +1,12,3,3,763,2,3,4,2,1,1,4,26,3,3,1,3,1,2,1,1 +3,24,0,3,2064,2,1,3,2,1,2,3,34,3,3,1,4,1,2,1,0 +3,8,3,4,1414,2,3,4,4,3,2,4,33,3,3,1,3,1,1,2,1 +2,21,0,7,3414,2,2,2,4,1,1,3,26,3,3,2,3,1,1,1,0 +1,30,4,2,7485,1,1,4,2,1,1,4,53,1,3,1,4,1,2,1,0 +2,12,3,3,2577,2,3,2,1,1,1,2,42,3,3,1,3,1,1,1,1 +2,6,1,4,338,4,5,4,4,1,4,2,52,3,3,2,3,1,1,1,1 +1,12,3,4,1963,2,4,4,4,1,2,2,31,3,2,2,4,2,2,1,1 +2,21,1,1,571,2,5,4,4,1,4,4,65,3,3,2,3,1,1,1,1 +1,36,0,10,9572,2,2,1,1,1,1,2,28,3,3,2,3,1,1,1,0 +3,36,0,10,4455,2,3,2,1,1,2,4,30,2,3,2,4,1,2,1,0 +2,21,4,1,1647,1,3,4,4,1,2,3,40,3,3,2,2,2,1,1,0 +1,24,1,3,3777,5,3,4,4,1,4,4,50,3,3,1,3,1,2,1,1 +3,18,1,1,884,2,5,4,4,1,4,2,36,1,3,1,3,2,2,1,0 +1,15,1,4,1360,2,3,4,4,1,2,3,31,3,3,2,3,1,1,1,1 +3,9,4,2,5129,2,5,2,2,1,4,1,74,1,1,1,4,2,2,1,0 +3,16,1,1,1175,2,1,2,4,1,3,2,68,3,1,3,1,1,2,1,1 +2,12,3,4,674,3,4,4,3,1,1,3,20,3,3,1,3,1,1,1,0 +3,18,2,3,3244,2,3,1,2,1,4,2,33,1,3,2,3,1,2,1,1 +1,24,3,10,4591,5,3,2,4,1,3,3,54,3,3,3,4,1,2,1,0 +3,48,2,10,3844,3,4,4,4,1,4,1,34,3,1,1,2,2,1,1,0 +3,27,3,10,3915,2,3,4,4,1,2,2,36,3,3,1,3,2,2,1,0 +1,6,3,4,2108,2,4,2,3,1,2,4,29,3,2,1,3,1,1,1,1 +3,45,3,4,3031,3,3,4,4,3,4,3,21,3,2,1,3,1,1,1,0 +3,9,1,7,1501,2,5,2,2,1,3,2,34,3,3,2,4,1,2,1,0 +1,6,1,4,1382,2,3,1,2,1,1,2,28,3,3,2,3,1,2,1,1 +3,12,3,3,951,3,2,4,2,1,4,2,27,1,2,4,3,1,1,1,0 +3,24,3,2,2760,1,5,4,4,1,4,1,36,1,1,1,3,1,2,1,1 +3,18,0,3,4297,2,5,4,1,1,3,1,40,3,3,1,4,1,2,1,0 
+1,9,1,7,936,4,5,4,4,1,2,2,52,3,3,2,3,1,2,1,1 +2,12,3,1,1168,2,3,4,3,1,3,4,27,3,3,1,2,1,1,1,1 +1,27,0,10,5117,2,4,3,4,1,4,2,26,3,3,2,3,1,1,1,1 +2,12,3,9,902,2,4,4,3,1,4,3,21,3,2,1,3,1,1,1,0 +1,12,1,1,1495,2,5,4,4,1,1,4,38,3,3,2,2,2,1,1,1 +2,30,1,2,10623,2,5,3,4,1,4,1,38,3,1,3,4,2,2,1,1 +1,12,1,3,1935,2,5,4,4,1,4,4,43,3,3,3,3,1,2,1,1 +3,12,1,5,1424,2,4,4,4,1,3,3,26,3,3,1,3,1,1,1,1 +2,24,3,10,6568,2,3,2,3,1,2,2,21,2,3,1,2,1,1,1,1 +1,12,3,2,1413,5,4,3,4,1,2,3,55,3,3,1,3,1,1,2,1 +1,9,1,4,3074,1,3,1,4,1,2,4,33,3,3,2,3,2,1,1,1 +1,36,3,4,3835,1,5,2,2,1,4,4,45,3,3,1,2,1,2,1,1 +2,27,2,10,5293,2,1,2,4,1,4,3,50,2,3,2,3,1,2,1,0 +4,30,0,10,1908,2,5,4,4,1,4,4,66,3,3,1,4,1,2,1,0 +1,36,1,4,3342,1,5,4,4,1,2,2,51,3,3,1,3,1,2,1,1 +3,6,1,9,932,1,4,1,2,1,3,3,39,3,3,2,2,1,1,1,1 +2,18,2,10,3104,2,4,3,4,1,1,3,31,1,3,1,3,1,2,1,1 +4,36,3,4,3913,2,3,2,4,1,2,4,23,3,3,1,3,1,2,1,1 +2,24,3,3,3021,2,3,2,1,1,2,4,24,3,2,1,2,1,1,1,1 +1,10,3,1,1364,2,3,2,2,1,4,2,64,3,3,1,3,1,2,1,1 +3,12,3,4,625,2,2,4,3,3,1,4,26,1,3,1,2,1,1,1,1 +2,12,3,7,1200,1,3,4,2,1,4,3,23,1,2,1,3,1,2,1,1 +1,12,3,4,707,2,3,4,4,1,2,4,30,1,3,2,3,1,1,1,1 +1,24,0,10,2978,1,3,4,4,1,4,4,32,3,3,2,3,2,2,1,1 +1,15,3,2,4657,2,3,3,4,1,2,2,30,3,3,1,3,1,2,1,1 +1,36,2,6,2613,2,3,4,4,1,2,2,27,3,3,2,3,1,1,1,1 +3,48,3,4,10961,5,4,1,4,2,2,1,27,1,3,2,3,1,2,1,0 +2,12,3,3,7865,2,5,4,4,1,4,1,53,3,1,1,4,1,2,1,0 +1,9,3,4,1478,2,4,4,4,1,2,2,22,3,3,1,3,1,1,1,0 +2,24,3,3,3149,2,2,4,4,1,1,1,22,1,1,1,3,1,1,1,1 +4,36,3,4,4210,2,3,4,4,1,2,2,26,3,3,1,3,1,1,1,0 +1,9,3,1,2507,4,5,2,4,1,4,1,51,3,1,1,2,1,1,1,1 +1,12,3,4,2141,3,4,3,4,1,1,1,35,3,3,1,3,1,1,1,1 +3,18,3,4,866,2,3,4,3,3,2,4,25,3,3,1,2,1,1,1,1 +1,4,1,4,1544,2,4,2,4,1,1,4,42,3,3,3,2,2,1,1,1 +2,24,3,4,1823,2,1,4,4,1,2,2,30,2,3,1,4,2,1,1,0 +3,6,3,1,14555,1,1,1,4,1,2,3,23,3,3,1,1,1,2,1,0 +3,21,3,10,2767,3,5,4,1,1,2,2,61,1,2,2,2,1,1,1,0 +1,12,1,4,1291,2,3,4,2,1,2,3,35,3,3,2,3,1,1,1,1 +2,30,3,4,2522,2,5,1,4,3,3,3,39,3,3,1,3,2,1,1,1 +2,24,3,1,915,1,5,4,2,1,2,2,29,1,3,1,3,1,1,1,0 
+1,6,3,4,1595,2,4,3,4,1,2,3,51,3,3,1,3,2,1,1,1 +2,48,2,2,4605,2,5,3,4,1,4,1,24,3,1,2,3,2,1,1,0 +1,12,1,10,1185,2,3,3,2,1,2,4,27,3,3,2,3,1,1,1,1 +1,12,4,9,3447,4,3,4,2,1,3,4,35,3,3,1,2,2,1,1,1 +1,24,3,10,1258,2,4,4,4,1,1,4,25,3,3,1,3,1,2,1,1 +1,12,1,4,717,2,5,4,4,1,4,4,52,3,3,3,3,1,1,1,1 +1,6,2,1,1204,3,3,4,4,1,1,1,35,1,2,1,3,1,1,2,1 +4,24,3,3,1925,2,3,2,4,1,2,4,26,3,3,1,3,1,1,1,1 +1,18,3,4,433,2,1,3,2,2,4,4,22,3,2,1,3,1,1,1,0 +2,6,1,1,666,5,4,3,2,1,4,4,39,3,3,2,2,1,2,1,1 +4,12,3,3,2251,2,3,1,2,1,2,2,46,3,3,1,2,1,1,1,1 +3,30,3,1,2150,2,3,4,2,3,2,1,24,1,3,1,3,1,1,1,0 +1,24,0,3,4151,3,3,2,4,1,3,3,35,3,3,2,3,1,1,1,1 +3,9,3,3,2030,1,4,2,4,1,1,2,24,3,3,1,3,1,2,1,1 +3,60,0,4,7418,1,3,1,4,1,1,4,27,3,3,1,2,1,1,1,1 +1,24,1,4,2684,2,3,4,4,1,2,4,35,3,3,2,2,1,1,1,1 +2,12,4,4,2149,2,3,4,1,1,1,1,29,3,1,1,3,1,1,1,0 +1,15,3,2,3812,3,2,1,2,1,4,2,23,3,3,1,3,1,2,1,1 +1,11,1,4,1154,3,1,4,2,1,4,4,57,3,3,3,2,1,1,1,1 +2,12,3,3,1657,2,3,2,4,1,2,4,27,3,3,1,3,1,1,1,1 +2,24,3,4,1603,2,5,4,2,1,4,2,55,3,3,1,3,1,1,1,1 +2,18,1,1,5302,2,5,2,4,1,4,1,36,3,1,3,4,1,2,1,1 +1,12,1,7,2748,2,5,2,2,1,4,1,57,1,1,3,2,1,1,1,1 +1,10,1,1,1231,2,5,3,4,1,4,4,32,3,3,2,2,2,1,2,1 +3,15,3,4,802,2,5,4,4,1,3,2,37,3,3,1,3,2,1,1,0 +1,36,1,10,6304,1,5,4,4,1,4,4,36,3,3,2,3,1,1,1,1 +1,24,3,4,1533,2,2,4,2,1,3,2,38,2,3,1,3,1,2,1,1 +2,14,3,1,8978,2,5,1,1,1,4,3,45,3,3,1,4,1,2,2,0 +1,24,3,4,999,1,5,4,4,1,2,2,25,3,3,2,3,1,1,1,1 +1,18,3,1,2662,1,4,4,4,1,3,3,32,3,3,1,3,1,1,2,1 +1,12,1,3,1402,4,4,3,2,1,4,2,37,3,2,1,3,1,2,1,1 +3,48,4,1,12169,1,1,4,4,2,4,1,36,3,1,1,4,1,2,1,1 +3,48,3,4,3060,2,4,4,4,1,4,4,28,3,3,2,3,1,1,1,0 +2,30,3,6,11998,2,2,1,1,1,1,1,34,3,3,1,2,1,2,1,0 +1,9,3,4,2697,2,3,1,4,1,2,4,32,3,3,1,3,2,1,1,1 +1,18,1,4,2404,2,3,2,2,1,2,2,26,3,3,2,3,1,1,1,1 +2,12,3,3,1262,1,5,2,1,1,4,3,49,3,3,1,2,1,2,1,1 +1,6,3,3,4611,2,2,1,2,1,4,3,32,3,3,1,3,1,1,1,0 +1,24,3,4,1901,3,3,4,4,1,4,2,29,3,2,1,4,1,2,1,1 +1,15,1,2,3368,5,5,3,4,1,4,1,23,3,2,2,3,1,2,1,1 +1,12,3,3,1574,2,3,4,4,1,2,4,50,3,3,1,3,1,1,1,1 
+4,18,4,4,1445,1,4,4,4,1,4,2,49,1,3,1,2,1,1,1,1 +1,15,1,3,1520,1,5,4,4,1,4,3,63,3,3,1,3,1,1,1,1 +3,24,1,1,3878,3,2,4,1,1,2,2,37,3,3,1,3,1,2,1,1 +2,47,3,1,10722,2,2,1,2,1,1,4,35,3,3,1,2,1,2,1,1 +2,48,3,2,4788,2,4,4,4,1,3,3,26,3,3,1,3,2,1,1,1 +3,48,0,0,7582,3,1,2,4,1,4,1,31,3,1,1,4,1,2,1,1 +3,12,3,4,1092,2,3,4,2,3,4,4,49,3,3,2,3,1,2,1,1 +2,24,0,4,1024,2,2,4,3,1,4,4,48,2,3,1,3,1,1,1,0 +1,12,3,10,1076,2,3,2,3,1,2,4,26,3,3,1,3,1,2,2,1 +3,36,3,2,9398,2,2,1,3,1,4,2,28,3,2,1,4,1,2,1,0 +2,24,1,2,6419,2,5,2,2,1,4,1,44,3,1,2,4,2,2,1,1 +4,42,1,2,4796,2,5,4,4,1,4,1,56,3,1,1,3,1,1,1,1 +1,48,1,10,7629,1,5,4,1,1,2,2,46,1,3,2,4,2,1,1,1 +3,48,3,3,9960,2,2,1,2,1,2,2,26,3,3,1,3,1,2,1,0 +1,12,3,2,4675,1,2,1,2,1,4,2,20,3,2,1,3,1,1,1,1 +1,10,3,1,1287,1,5,4,4,2,2,3,45,3,3,1,2,1,1,2,1 +1,18,3,3,2515,2,3,3,4,1,4,4,43,3,3,1,3,1,2,1,1 +3,21,1,3,2745,5,4,3,4,1,2,2,32,3,3,2,3,1,2,1,1 +1,6,3,1,672,2,1,1,2,1,4,4,54,3,3,1,1,1,2,1,1 +3,36,2,4,3804,2,3,4,2,1,1,2,42,3,3,1,3,1,2,1,0 +4,24,1,1,1344,1,4,4,4,1,2,4,37,1,3,2,2,2,1,1,0 +2,10,1,1,1038,2,4,4,4,2,3,3,49,3,3,2,3,1,2,1,1 +1,48,1,1,10127,4,3,2,4,1,2,1,44,1,1,1,3,1,1,1,0 +1,6,3,3,1543,5,3,4,1,1,2,4,33,3,3,1,3,1,1,1,1 +1,30,3,2,4811,1,4,2,2,1,4,3,24,2,2,1,2,1,1,1,1 +2,12,3,4,727,3,2,4,3,1,3,1,33,3,3,1,2,1,2,1,0 +3,8,3,3,1237,2,3,3,2,1,4,4,24,3,3,1,3,1,1,1,0 +3,9,3,1,276,2,3,4,3,1,4,4,22,3,2,1,2,1,1,1,1 +3,48,3,0,5381,1,1,3,4,1,4,1,40,1,1,1,1,1,2,1,1 +1,24,3,3,5511,3,3,4,4,1,1,2,25,2,3,1,3,1,1,1,1 +4,24,3,3,3749,2,2,2,2,1,4,2,26,3,3,1,3,1,1,1,1 +3,12,3,1,685,2,4,2,3,1,3,2,25,1,3,1,2,1,1,1,0 +4,4,3,1,1494,1,2,1,4,1,2,4,29,3,3,1,2,2,1,2,1 +2,36,4,3,2746,2,5,4,4,1,4,2,31,1,3,1,3,1,1,1,0 +2,12,3,3,708,2,3,2,4,3,3,3,38,3,3,1,2,2,1,1,1 +3,24,3,3,4351,1,3,1,2,1,4,3,48,3,3,1,2,1,2,1,1 +1,12,1,7,701,2,3,4,4,1,2,2,32,3,3,2,3,1,1,1,1 +2,15,0,3,3643,2,5,1,2,1,4,3,27,3,3,2,2,1,1,1,1 +3,30,1,1,4249,2,1,4,3,1,2,2,28,3,3,2,4,1,1,1,0 +2,24,3,4,1938,2,2,4,1,1,3,3,32,3,3,1,3,1,1,1,0 +2,24,3,2,2910,2,4,2,4,1,1,1,34,3,1,1,4,1,2,1,1 
+2,18,3,3,2659,5,3,4,4,1,2,2,28,3,3,1,3,1,1,1,1 +1,18,1,1,1028,2,3,4,2,1,3,4,36,3,3,2,3,1,1,1,1 +2,8,1,1,3398,2,4,1,4,1,4,4,39,3,3,2,2,1,1,2,1 +1,12,1,3,5801,1,5,2,4,1,4,3,49,3,2,1,3,1,2,1,1 +1,24,3,1,1525,5,4,4,2,1,3,2,34,3,3,1,3,2,2,1,1 +4,36,3,4,4473,2,5,4,4,1,2,2,31,3,3,1,3,1,1,1,1 +3,6,3,4,1068,2,5,4,4,1,4,2,28,3,3,1,3,2,1,1,1 +2,24,1,2,6615,2,1,2,4,1,4,1,75,3,1,2,4,1,2,1,1 +1,18,1,7,1864,3,3,4,2,1,2,4,30,3,3,2,3,1,1,1,0 +3,60,3,1,7408,3,2,4,2,1,2,3,24,3,3,1,4,1,1,1,0 +1,48,1,2,11590,3,3,2,2,1,4,2,24,1,2,2,2,1,1,1,0 +2,24,2,3,4110,2,5,3,4,1,4,1,23,1,2,2,3,2,1,1,0 +2,6,1,3,3384,2,3,1,1,1,4,4,44,3,2,1,4,1,2,1,0 +3,13,3,4,2101,2,2,2,2,3,4,3,23,3,3,1,2,1,1,1,1 +2,15,3,5,1275,1,3,4,2,1,2,2,24,3,2,1,3,1,1,1,0 +2,24,3,3,4169,2,3,4,4,1,4,3,28,3,3,1,3,1,1,1,1 +3,10,3,3,1521,2,3,4,1,1,2,2,31,3,3,1,2,1,1,1,1 +3,24,1,7,5743,2,2,2,2,1,4,1,24,3,1,2,3,1,2,1,1 +2,21,3,3,3599,2,4,1,2,1,4,2,26,3,2,1,2,1,1,1,1 +3,18,3,4,3213,4,2,1,3,1,3,4,25,3,2,1,3,1,1,1,1 +3,18,3,10,4439,2,5,1,4,2,1,4,33,1,3,1,4,1,2,1,1 +4,10,3,1,3949,2,2,1,4,3,1,3,37,3,3,1,2,2,1,1,1 +1,15,1,4,1459,2,3,4,2,1,2,2,43,3,3,1,2,1,1,1,1 +3,13,1,4,882,2,2,4,4,3,4,4,23,3,3,2,3,1,1,1,1 +3,24,3,4,3758,4,1,1,2,1,4,1,23,3,2,1,1,1,1,1,1 +1,6,0,10,1743,3,3,1,4,1,2,4,34,3,3,2,2,1,1,1,1 +3,9,1,7,1136,5,5,4,4,1,3,1,32,3,1,2,3,2,1,1,0 +1,9,3,5,1236,2,2,1,2,1,4,4,23,3,2,1,3,1,2,1,1 +3,9,3,3,959,2,3,1,2,1,2,2,29,3,3,1,3,1,1,2,0 +1,18,1,2,3229,1,1,2,4,1,4,1,38,3,3,1,4,1,2,1,1 +2,12,2,4,6199,2,3,4,4,1,2,3,28,3,2,2,3,1,2,1,0 +1,10,3,7,727,4,5,4,4,1,4,1,46,3,1,1,3,1,2,1,1 +3,24,3,1,1246,2,2,4,4,1,2,4,23,2,3,1,2,1,1,1,0 +1,12,1,4,2331,1,5,1,4,2,4,4,49,3,3,1,3,1,2,1,1 +1,36,0,4,4463,2,3,4,4,1,2,2,26,3,3,2,4,1,2,1,0 +1,12,3,4,776,2,3,4,3,1,2,4,28,3,3,1,3,1,1,1,1 +2,30,3,3,2406,2,4,4,2,1,4,4,23,3,2,1,3,1,1,1,0 +3,18,3,7,1239,1,3,4,4,1,4,1,61,3,1,1,3,1,1,1,1 +4,12,3,4,3399,1,5,2,4,1,3,2,37,3,3,1,4,1,1,1,1 +4,12,0,1,2247,2,3,2,2,1,2,2,36,2,3,2,3,1,2,1,1 +1,6,3,3,1766,2,3,1,3,1,2,3,21,3,2,1,3,1,1,1,1 
+2,18,3,3,2473,2,1,4,4,1,1,2,25,3,3,1,1,1,1,1,0 +1,12,3,10,1542,2,4,2,4,1,4,2,36,3,3,1,3,1,2,1,1 +1,18,1,2,3850,2,4,3,4,1,1,2,27,3,3,2,3,1,1,1,1 +2,18,3,3,3650,2,2,1,2,1,4,2,22,3,2,1,3,1,1,1,1 +2,36,3,3,3446,2,5,4,4,1,2,2,42,3,3,1,3,2,1,1,0 +3,18,3,3,3001,2,4,2,2,1,4,4,40,3,2,1,3,1,1,1,1 +1,36,3,1,3079,1,3,4,4,1,4,4,36,3,3,1,3,1,1,1,1 +1,18,1,4,6070,2,5,3,4,1,4,2,33,3,3,2,3,1,2,1,1 +1,10,1,3,2146,2,2,1,2,1,3,4,23,3,2,2,3,1,1,1,1 +1,60,1,1,13756,1,5,2,4,1,4,1,63,1,1,1,4,1,2,1,1 +3,60,4,0,14782,3,5,3,2,1,4,1,60,1,1,2,4,1,2,1,0 +2,48,4,10,7685,2,4,2,2,3,4,2,37,3,2,1,3,1,1,1,0 +1,18,0,4,2320,2,1,2,3,1,3,4,34,3,3,2,3,1,1,1,1 +1,7,0,4,846,1,5,3,4,1,4,1,36,3,1,1,3,1,1,1,1 +3,36,3,1,14318,2,5,4,4,1,2,1,57,3,1,1,4,1,2,1,0 +1,6,1,1,362,3,3,4,2,1,4,2,52,3,3,2,2,1,1,1,1 +2,20,3,3,2212,1,4,4,4,1,4,2,39,3,3,1,3,1,2,1,1 +3,18,3,2,12976,2,1,3,2,1,4,1,38,3,1,1,4,1,2,1,0 +1,22,3,1,1283,1,4,4,2,1,4,3,25,3,2,1,3,1,1,1,1 +4,12,3,1,1330,2,2,4,4,1,1,4,26,3,3,1,3,1,1,1,1 +1,30,0,10,4272,3,3,2,4,1,2,3,26,3,3,2,2,1,1,1,1 +1,18,1,4,2238,2,3,2,2,1,1,2,25,3,3,2,3,1,1,1,1 +1,18,3,4,1126,1,2,4,2,1,2,4,21,3,2,1,3,1,2,1,1 +3,18,1,3,7374,2,1,4,4,1,4,3,40,2,3,2,4,1,2,1,1 +3,15,1,10,2326,4,3,2,4,1,4,2,27,1,3,1,3,1,1,1,1 +1,9,3,10,1449,2,4,3,2,1,2,2,27,3,3,2,3,1,1,1,1 +1,18,3,1,1820,2,3,2,3,1,2,3,30,3,3,1,4,1,2,1,1 +3,12,3,3,983,5,2,1,2,1,4,4,19,3,2,1,2,1,1,1,1 +2,36,3,1,3249,2,4,2,4,1,4,1,39,1,1,1,4,2,2,1,1 +2,6,1,4,1957,2,4,1,2,1,4,2,31,3,3,1,3,1,1,1,1 +1,9,1,3,2406,2,1,2,4,1,3,2,31,3,3,1,4,1,1,1,1 +3,39,0,7,11760,3,4,2,4,1,3,1,32,3,2,1,3,1,2,1,1 +2,12,3,3,2578,2,1,3,2,1,4,1,55,3,1,1,4,1,1,1,1 +2,36,1,3,2348,2,3,3,3,1,2,3,46,3,3,2,3,1,2,1,1 +3,12,3,1,1223,2,5,1,1,1,1,4,46,3,2,2,3,1,1,1,0 +1,24,1,4,1516,5,3,4,2,1,1,4,43,3,3,2,2,1,1,1,1 +1,18,3,4,1473,2,2,3,3,1,4,4,39,3,3,1,3,1,2,1,1 +3,18,1,10,1887,1,3,4,3,1,4,4,28,1,3,2,3,1,1,1,1 +1,24,0,10,8648,2,2,2,4,1,2,2,27,1,3,2,3,1,2,1,0 +1,14,0,1,802,2,3,4,4,1,2,2,27,3,3,2,2,1,1,1,1 +3,18,0,1,2899,1,5,4,4,1,4,2,43,3,3,1,3,2,1,1,1 
+3,24,3,4,2039,2,2,1,3,1,1,3,22,3,3,1,3,1,2,1,0 +1,24,1,2,2197,1,4,4,4,1,4,2,43,3,3,2,3,2,2,1,1 +2,15,3,4,1053,2,2,4,3,1,2,4,27,3,3,1,3,1,1,2,1 +1,24,3,4,3235,4,5,3,1,1,2,2,26,3,3,1,4,1,2,1,1 +4,12,1,1,939,4,4,4,3,1,2,4,28,3,3,3,3,1,2,1,0 +3,24,3,4,1967,2,5,4,2,1,4,2,20,3,3,1,3,1,2,1,1 +1,33,1,2,7253,2,4,3,4,1,2,2,35,3,3,2,4,1,2,1,1 +1,12,1,10,2292,2,1,4,4,1,2,2,42,2,3,2,4,1,2,1,0 +1,10,3,1,1597,4,3,3,4,1,2,1,40,3,2,1,2,2,1,2,1 +2,24,3,1,1381,1,3,4,2,1,2,3,35,3,3,1,3,1,1,1,0 +1,36,1,2,5842,2,5,2,4,1,2,3,35,3,3,2,3,2,2,1,1 +2,12,3,1,2579,2,2,4,4,1,1,4,33,3,3,1,2,2,1,1,0 +2,18,0,7,8471,1,3,1,2,1,2,2,23,3,2,2,3,1,2,1,1 +1,21,3,1,2782,4,4,1,2,1,2,2,31,1,3,1,4,1,1,1,1 +3,18,3,1,1042,1,3,4,2,1,2,3,33,3,3,1,3,1,1,1,0 +1,15,3,1,3186,5,4,2,2,1,3,2,20,3,2,1,3,1,1,1,1 +3,12,3,2,2028,1,3,4,4,1,2,2,30,3,3,1,3,1,1,1,1 +3,12,1,1,958,2,4,2,4,1,3,4,47,3,3,2,2,2,1,1,1 +1,21,0,3,1591,3,4,4,4,1,3,4,34,3,3,2,4,1,1,1,1 +3,12,3,3,2762,1,5,1,2,1,2,3,25,1,3,1,3,1,2,1,0 +3,18,3,2,2779,2,3,1,3,1,3,2,21,3,2,1,3,1,2,1,1 +1,28,1,4,2743,2,5,4,4,1,2,2,29,3,3,2,3,1,1,1,1 +1,18,1,4,1149,5,3,4,4,1,3,4,46,3,3,2,3,1,1,1,1 +1,9,3,3,1313,2,5,1,4,1,4,2,20,3,3,1,3,1,1,1,1 +2,18,1,6,1190,2,1,2,2,1,4,1,55,3,1,3,1,2,1,1,0 +1,5,3,10,3448,2,4,1,4,1,4,4,74,3,3,1,2,1,1,1,1 +3,24,3,0,11328,2,3,2,4,2,3,2,29,1,3,2,4,1,2,1,0 +2,6,1,3,1872,2,1,4,4,1,4,1,36,3,1,3,4,1,2,1,1 +1,24,1,6,2058,2,3,4,1,1,2,4,33,3,3,2,3,1,2,1,1 +2,9,3,3,2136,2,3,3,4,1,2,4,25,3,3,1,3,1,1,1,1 +3,12,3,4,1484,1,3,2,3,1,1,4,25,3,3,1,3,1,2,1,0 +1,6,3,6,660,4,4,2,3,1,4,4,23,3,2,1,2,1,1,1,1 +1,24,1,1,1287,5,5,4,2,1,4,4,37,3,3,2,3,1,2,1,1 +2,42,1,6,3394,2,1,4,4,2,4,2,65,3,3,2,1,1,1,1,1 +4,12,4,10,609,2,2,4,2,1,1,4,26,3,3,1,1,1,1,1,0 +1,12,3,1,1884,2,5,4,4,1,4,2,39,3,3,1,4,1,2,1,1 +2,12,3,3,1620,2,3,2,2,2,3,3,30,3,3,1,3,1,1,1,1 +3,20,0,0,2629,2,3,2,4,1,3,2,29,1,3,2,3,1,2,1,1 +1,12,3,7,719,2,5,4,4,1,4,2,41,1,3,1,2,2,1,1,0 +3,48,1,3,5096,2,3,2,2,1,3,2,30,3,3,1,4,1,2,1,0 +1,9,1,7,1244,1,5,4,2,1,4,3,41,3,2,2,2,1,1,1,1 
+2,36,3,1,1842,2,2,4,2,1,4,2,34,3,3,1,3,1,2,1,0 +3,7,3,4,2576,2,3,2,4,3,2,4,35,3,3,1,3,1,1,1,1 +4,12,3,3,1424,1,5,3,2,1,4,4,55,3,3,1,4,1,2,1,1 +3,15,0,6,1512,5,3,3,3,1,3,3,61,2,3,2,3,1,1,1,0 +1,36,1,2,11054,1,3,4,4,1,2,2,30,3,3,1,4,1,2,1,1 +1,6,3,4,518,2,3,3,2,1,1,4,29,3,3,1,3,1,1,1,1 +1,12,2,3,2759,2,5,2,4,1,4,3,34,3,3,2,3,1,1,1,1 +1,24,3,2,2670,2,5,4,4,1,4,2,35,3,3,1,4,1,2,1,1 +2,24,3,1,4817,2,4,2,4,2,3,3,31,3,3,1,3,1,2,1,0 +1,24,3,2,2679,2,2,4,2,1,1,1,29,3,3,1,4,1,2,1,1 +2,11,1,1,3905,2,3,2,4,1,2,4,36,3,2,2,3,2,1,1,1 +2,12,3,2,3386,2,5,3,4,1,4,1,35,3,1,1,3,1,2,1,0 +2,6,3,5,343,2,2,4,2,1,1,4,27,3,3,1,3,1,1,1,1 +1,18,3,4,4594,2,2,3,4,1,2,2,32,3,3,1,3,1,2,1,1 +2,36,3,3,3620,2,3,1,4,3,2,3,37,3,3,1,3,2,1,1,1 +2,15,3,1,1721,2,2,2,4,1,3,4,36,3,3,1,3,1,1,1,1 +3,12,3,3,3017,2,2,3,2,1,1,4,34,3,2,1,4,1,1,1,1 +3,12,3,9,754,1,5,4,4,1,4,3,38,3,3,2,3,1,1,1,1 +1,18,3,10,1950,2,4,4,4,1,1,2,34,2,3,2,3,1,2,1,1 +2,24,3,2,2924,2,3,3,4,3,4,1,63,1,3,1,3,2,2,1,1 +2,24,0,4,1659,2,2,4,2,1,2,2,29,3,2,1,2,1,2,1,0 +1,48,0,4,7238,1,5,3,4,1,3,2,32,1,3,2,3,2,1,1,1 +1,33,0,10,2764,2,3,2,2,1,2,2,26,3,3,2,3,1,2,1,1 +1,24,0,2,4679,2,4,3,4,1,3,2,35,3,3,2,2,1,2,1,1 +3,24,3,4,3092,3,2,3,3,1,2,2,22,3,2,1,3,1,2,1,0 +2,6,3,7,448,2,2,4,2,1,4,3,23,3,3,1,3,1,1,1,0 +2,9,3,1,654,2,3,4,4,1,3,2,28,3,3,1,2,1,1,1,0 +1,6,3,9,1238,1,1,4,4,1,4,3,36,3,3,1,4,2,2,1,1 +3,18,1,4,1245,2,3,4,3,1,2,2,33,3,3,1,3,1,1,1,0 +2,18,2,3,3114,2,2,1,2,1,4,3,26,3,2,1,3,1,1,1,0 +1,39,3,2,2569,4,3,4,4,1,4,2,24,3,3,1,3,1,1,1,1 +4,24,3,4,5152,2,4,4,4,1,2,2,25,1,3,1,3,1,1,1,1 +3,12,3,10,1037,3,4,3,4,1,4,4,39,3,3,1,2,1,1,1,1 +2,15,1,3,1478,2,5,4,4,1,4,2,44,3,3,2,3,2,2,1,1 +3,12,1,4,3573,2,3,1,2,1,1,4,23,3,3,1,2,1,1,1,1 +3,24,3,1,1201,2,2,4,4,1,1,3,26,3,3,1,3,1,1,1,1 +2,30,3,3,3622,5,5,4,2,1,4,3,57,3,2,2,3,1,2,1,1 +1,15,0,3,960,5,4,3,2,1,2,3,30,3,3,2,3,1,1,1,1 +1,12,1,1,1163,4,3,4,4,1,4,4,44,3,3,1,3,1,2,1,1 +3,6,0,1,1209,2,1,4,4,1,4,3,47,3,3,1,4,1,2,1,0 +1,12,3,4,3077,2,3,2,4,1,4,2,52,3,3,1,3,1,2,1,1 
+1,24,3,1,3757,2,5,4,2,2,4,1,62,3,1,1,3,1,2,1,1 +1,10,3,1,1418,3,3,3,4,1,2,4,35,3,2,1,2,1,1,2,1 +1,6,3,1,3518,2,3,2,4,3,3,3,26,3,2,1,3,1,1,1,1 +1,12,1,4,1934,2,5,2,4,1,2,1,26,3,3,2,3,1,1,1,1 +3,27,2,10,8318,2,5,2,2,1,4,1,42,3,1,2,4,1,2,1,0 +1,6,1,4,1237,3,3,1,2,1,1,3,27,3,3,2,3,1,1,1,1 +3,6,3,4,368,1,5,4,4,1,4,3,38,3,3,1,3,1,1,1,1 +2,12,1,1,2122,2,3,3,4,1,2,4,39,3,2,2,2,2,1,2,1 +2,24,3,3,2996,1,3,2,3,1,4,2,20,3,3,1,3,1,1,1,0 +3,36,3,3,9034,3,2,4,4,2,1,1,29,3,2,1,4,1,2,1,0 +1,24,1,3,1585,2,4,4,4,1,3,3,40,3,3,2,3,1,1,1,1 +3,18,3,4,1301,2,5,4,3,3,2,4,32,3,3,1,2,1,1,1,1 +4,6,1,1,1323,3,5,2,1,1,4,2,28,3,3,2,3,2,2,1,1 +2,24,3,1,3123,2,2,4,2,1,1,3,27,3,3,1,3,1,1,1,0 +2,36,3,2,5493,2,5,2,4,1,4,1,42,3,1,1,3,2,1,1,1 +4,9,3,4,1126,3,5,2,1,1,4,4,49,3,3,1,3,1,1,1,1 +3,24,1,4,1216,3,2,4,4,1,4,1,38,1,3,2,3,2,1,1,0 +2,24,3,1,1207,2,2,4,2,1,4,3,24,3,2,1,3,1,1,1,0 +1,10,3,1,1309,1,3,4,4,3,4,3,27,3,3,1,2,1,1,1,0 +4,15,1,2,2360,4,3,2,4,1,2,2,36,3,3,1,3,1,2,1,1 +3,15,4,1,6850,3,1,1,4,1,2,3,34,3,3,1,4,2,2,1,0 +1,24,3,4,1413,2,3,4,3,1,2,3,28,3,3,1,3,1,1,1,1 +1,39,3,2,8588,3,5,4,4,1,2,2,45,3,3,1,4,1,2,1,1 +2,12,3,1,759,2,4,4,4,1,2,4,26,3,3,1,3,1,1,1,0 +1,36,3,2,4686,2,3,2,4,1,2,1,32,3,1,1,4,1,2,1,1 +4,15,3,10,2687,2,4,2,4,1,4,3,26,3,2,1,3,1,2,1,1 +3,12,0,4,585,2,3,4,3,2,4,4,20,3,2,2,3,1,1,1,1 +1,24,3,1,2255,1,2,4,4,1,1,3,54,3,3,1,3,1,1,1,1 +2,6,1,1,609,2,4,4,2,1,3,3,37,3,3,2,3,1,1,2,1 +2,6,1,1,1361,2,2,2,4,1,4,4,40,3,3,1,2,2,1,2,1 +1,36,1,3,7127,2,2,2,2,1,4,3,23,3,2,2,3,1,2,1,0 +2,6,3,1,1203,3,5,3,4,1,2,3,43,3,3,1,3,1,2,1,1 +1,6,1,4,700,1,5,4,4,1,4,1,36,3,1,2,3,1,1,1,1 +1,24,1,6,5507,2,5,3,4,1,4,1,44,3,1,2,3,1,1,1,1 +2,18,3,4,3190,2,3,2,2,1,2,4,24,3,3,1,3,1,1,1,0 +2,48,2,3,7119,2,3,3,4,1,4,1,53,3,1,2,3,2,1,1,0 +1,24,3,2,3488,3,4,3,2,1,4,2,23,3,3,1,3,1,1,1,1 +3,18,3,4,1113,2,3,4,2,3,4,4,26,3,3,1,2,2,1,1,1 +3,26,3,2,7966,2,2,2,4,1,3,2,30,3,3,2,3,1,1,1,1 +1,15,1,7,1532,3,3,4,2,1,3,2,31,3,3,1,3,1,1,1,1 +1,4,1,4,1503,2,4,2,4,1,1,4,42,3,3,2,2,2,1,1,1 
+2,36,3,4,2302,2,3,4,1,1,4,2,31,3,2,1,3,1,1,1,0 +2,6,3,1,662,2,2,3,4,1,4,4,41,3,3,1,2,2,2,1,1 +3,36,3,7,2273,2,4,3,4,1,1,2,32,3,3,2,3,2,1,1,1 +3,15,3,1,2631,3,3,2,2,1,4,2,28,3,2,2,3,1,2,1,0 +1,12,0,2,1503,2,3,4,3,1,4,4,41,3,2,1,3,1,1,1,1 +1,24,3,4,1311,3,4,4,3,1,3,3,26,3,3,1,3,1,2,1,1 +1,24,3,4,3105,1,2,4,4,1,2,2,25,3,3,2,3,1,1,1,1 +4,21,1,7,2319,2,2,2,1,1,1,2,33,3,2,1,3,1,1,1,0 +2,6,3,1,1374,1,1,4,2,1,3,3,75,3,3,1,4,1,2,1,1 +3,18,1,3,3612,2,5,3,2,1,4,3,37,3,3,1,3,1,2,1,1 +2,48,3,1,7763,2,5,4,4,1,4,1,42,1,1,1,4,1,1,1,0 +4,18,3,3,3049,2,2,1,2,1,1,3,45,2,3,1,2,1,1,1,1 +3,12,3,4,1534,2,2,1,3,1,1,4,23,3,2,1,3,1,1,1,0 +1,24,0,1,2032,2,5,4,4,1,4,1,60,3,1,2,3,1,2,1,1 +2,30,3,3,6350,1,5,4,4,1,4,3,31,3,3,1,3,1,1,1,0 +4,18,3,3,2864,2,3,2,4,1,1,4,34,3,3,1,2,2,1,1,0 +1,12,1,1,1255,2,5,4,4,1,4,4,61,3,3,2,2,1,1,1,1 +2,24,0,1,1333,2,1,4,4,1,2,4,43,3,1,2,3,2,1,1,0 +1,24,1,1,2022,2,3,4,2,1,4,2,37,3,3,1,3,1,2,1,1 +1,24,3,4,1552,2,4,3,4,1,1,2,32,1,3,1,3,2,1,1,1 +2,12,4,4,626,2,3,4,2,1,4,4,24,1,3,1,2,1,1,1,0 +1,48,1,2,8858,1,4,2,4,1,1,1,35,3,1,2,3,1,2,1,1 +1,12,1,6,996,1,4,4,2,1,4,4,23,3,3,2,3,1,1,1,1 +1,6,4,4,1750,4,5,2,4,1,4,3,45,1,3,1,2,2,1,1,1 +2,48,3,4,6999,2,4,1,3,3,1,4,34,3,3,2,3,1,2,1,0 +3,12,1,1,1995,3,2,4,4,1,1,2,27,3,3,1,3,1,1,1,1 +3,9,3,7,1199,2,4,4,2,1,4,3,67,3,3,2,4,1,2,1,1 +3,12,3,4,1331,2,2,2,4,1,1,2,22,2,3,1,3,1,1,1,0 +3,18,2,1,2278,3,2,3,2,1,3,2,28,3,3,2,3,1,1,1,0 +1,21,2,1,5003,1,3,1,2,1,4,3,29,1,3,2,3,1,2,1,0 +2,24,4,3,3552,2,4,3,4,1,4,2,27,1,3,1,3,1,1,1,0 +3,18,1,3,1928,2,2,2,4,1,2,4,31,3,3,2,2,1,1,1,0 +2,24,3,2,2964,1,5,4,4,1,4,1,49,1,1,1,3,2,2,1,1 +2,24,4,4,1546,2,4,4,4,3,4,2,24,1,2,1,2,1,1,1,0 +4,6,0,4,683,2,2,2,2,1,1,3,29,1,3,1,3,1,1,1,1 +3,36,3,1,12389,1,3,1,4,1,4,1,37,3,1,1,3,1,2,1,0 +3,24,0,10,4712,1,3,4,4,1,2,3,37,1,3,2,4,1,2,1,1 +3,24,0,4,1553,3,4,3,2,1,2,3,23,3,2,2,3,1,2,1,1 +2,12,3,1,1372,2,4,2,1,1,3,2,36,3,3,1,3,1,1,1,0 +1,24,1,4,2578,5,5,2,4,1,2,2,34,3,3,1,3,1,1,1,1 +3,48,3,4,3979,1,4,4,4,1,1,2,41,3,3,2,3,2,2,1,1 
+2,48,3,4,6758,2,3,3,2,1,2,2,31,3,3,1,3,1,2,1,0 +2,24,3,3,3234,2,2,4,2,1,4,4,23,3,2,1,2,1,2,1,0 +1,30,1,4,5954,2,4,3,4,2,2,2,38,3,3,1,3,1,1,1,1 +1,24,3,2,5433,1,1,2,2,1,4,3,26,3,2,1,4,1,2,1,1 +2,15,3,10,806,2,3,4,2,1,4,3,22,3,3,1,2,1,1,1,1 +3,9,3,4,1082,2,5,4,4,1,4,2,27,3,3,2,2,1,1,1,1 +1,15,1,3,2788,2,4,2,2,2,3,2,24,1,3,2,3,1,1,1,1 +3,12,3,4,2930,2,4,2,2,1,1,4,27,3,3,1,3,1,1,1,1 +1,24,1,7,1927,1,3,3,2,1,2,2,33,3,3,2,3,1,2,1,1 +3,36,1,1,2820,2,2,4,1,1,4,2,27,3,3,2,3,1,1,1,0 +1,24,3,9,937,2,2,4,3,1,3,2,27,3,3,2,2,1,1,1,1 +3,18,1,1,1056,2,5,3,4,3,3,4,30,1,3,2,3,1,1,1,0 +3,12,1,1,3124,2,2,1,4,1,3,4,49,1,3,2,2,2,1,1,1 +1,9,3,3,1388,2,3,4,2,1,2,4,26,3,2,1,3,1,1,1,1 +3,36,3,6,2384,2,2,4,4,1,1,1,33,3,2,1,2,1,1,1,0 +1,12,3,1,2133,1,5,4,2,1,4,1,52,3,1,1,4,1,2,1,1 +2,18,3,3,2039,2,3,1,2,1,4,4,20,1,2,1,3,1,1,1,0 +2,9,1,1,2799,2,3,2,4,1,2,4,36,3,2,2,3,2,1,1,1 +2,12,3,3,1289,2,3,4,4,3,1,3,21,3,3,1,2,1,1,1,1 +2,18,3,5,1217,2,3,4,3,1,3,4,47,3,3,1,2,1,2,1,0 +2,12,1,3,2246,2,5,3,4,1,3,3,60,3,3,2,3,1,1,1,0 +2,12,1,4,385,2,4,4,2,1,3,4,58,3,3,4,2,1,2,1,1 +3,24,0,1,1965,1,3,4,2,1,4,2,42,3,2,2,3,1,2,1,1 +1,21,3,10,1572,5,5,4,2,1,4,4,36,1,3,1,2,1,1,1,1 +3,24,3,1,2718,2,3,3,2,1,4,3,20,3,2,1,2,1,2,1,0 +2,24,4,0,1358,1,5,4,4,1,3,2,40,2,3,1,4,1,2,1,0 +3,6,4,1,931,3,2,1,2,1,1,3,32,2,3,1,2,1,1,1,0 +2,24,3,1,1442,2,4,4,2,1,4,2,23,3,2,2,3,1,1,1,0 +3,24,2,10,4241,2,3,1,4,1,4,4,36,3,3,3,2,1,2,1,0 +1,18,1,1,2775,2,4,2,4,1,2,3,31,1,3,2,3,1,1,1,0 +1,24,0,10,3863,2,3,1,4,1,2,1,32,3,1,1,3,1,1,1,1 +3,7,3,4,2329,2,2,1,2,3,1,4,45,3,3,1,3,1,1,1,1 +3,9,3,3,918,2,3,4,2,1,1,3,30,3,3,1,3,1,1,1,0 +3,24,4,7,1837,2,4,4,2,1,4,1,34,1,1,1,2,1,1,1,0 +1,36,3,3,3349,2,3,4,2,1,2,2,28,3,3,1,4,1,2,1,0 +4,10,3,3,1275,2,2,4,2,1,2,3,23,3,3,1,3,1,1,1,1 +2,24,4,3,2828,4,3,4,4,1,4,4,22,2,3,1,3,1,2,1,1 +1,24,1,10,4526,2,3,3,4,1,2,4,74,3,3,1,4,1,2,1,1 +3,36,3,4,2671,3,3,4,2,2,4,1,50,3,1,1,3,1,1,1,0 +1,18,3,4,2051,2,2,4,4,1,1,4,33,3,3,1,3,1,1,1,1 +1,15,3,2,1300,1,5,4,4,1,4,1,45,1,1,1,3,2,1,1,1 
+2,12,3,5,741,3,1,4,2,1,3,3,22,3,3,1,3,1,1,1,0 +4,10,3,1,1240,3,5,1,2,1,4,1,48,3,1,1,2,2,1,1,0 +2,21,3,4,3357,5,2,4,2,1,2,2,29,1,3,1,3,1,1,1,1 +2,24,4,2,3632,2,3,1,2,3,4,2,22,1,2,1,3,1,1,2,1 +1,18,0,3,1808,2,4,4,2,1,1,4,22,3,3,1,3,1,1,1,0 +3,48,2,10,12204,1,3,2,4,1,2,2,48,1,3,1,4,1,2,1,1 +3,60,0,4,9157,1,3,2,4,1,2,1,27,3,1,1,4,1,1,1,1 +2,6,1,1,3676,2,3,1,4,1,3,4,37,3,2,3,3,2,1,1,1 +3,30,3,3,3441,3,3,2,2,2,4,2,21,3,2,1,3,1,1,1,0 +1,12,3,1,640,2,3,4,1,1,2,4,49,3,3,1,2,1,1,1,1 +3,21,1,10,3652,2,4,2,4,1,3,3,27,3,3,2,3,1,1,1,1 +1,18,1,1,1530,2,3,3,4,1,2,3,32,1,3,2,3,1,1,1,0 +1,48,3,10,3914,1,3,4,1,1,2,4,38,1,3,1,3,1,1,1,0 +2,12,3,3,1858,2,2,4,2,1,1,2,22,3,2,1,3,1,1,1,1 +2,18,3,4,2600,2,3,4,4,1,4,1,65,3,1,2,3,1,1,1,0 +1,15,3,4,1979,1,5,4,4,1,2,2,35,3,3,1,3,1,1,1,1 +4,6,3,3,2116,2,3,2,4,1,2,4,41,3,3,1,3,1,2,1,1 +3,9,4,1,1437,3,4,2,4,1,3,1,29,3,3,1,3,1,1,1,0 +1,42,1,3,4042,4,3,4,4,1,4,4,36,3,3,2,3,1,2,1,1 +1,9,3,7,3832,1,5,1,4,1,4,4,64,3,3,1,2,1,1,1,1 +2,24,3,4,3660,2,3,2,2,1,4,2,28,3,3,1,3,1,1,1,1 +2,18,4,3,1553,2,3,4,4,1,3,2,44,1,3,1,3,1,1,1,0 +3,15,3,4,1444,1,2,4,4,1,1,3,23,3,3,1,3,1,1,1,1 +1,9,3,3,1980,2,2,2,2,2,2,2,19,3,2,2,3,1,1,1,0 +3,24,3,1,1355,2,2,3,2,1,4,2,25,3,3,1,2,1,2,1,0 +1,12,3,7,1393,2,5,4,4,1,4,3,47,1,3,3,3,2,2,1,1 +1,24,3,4,1376,4,4,4,2,1,1,2,28,3,3,1,3,1,1,1,1 +1,60,0,4,15653,2,4,2,4,1,4,2,21,3,3,2,3,1,2,1,1 +1,12,3,4,1493,2,2,4,2,1,3,2,34,3,3,1,3,2,1,1,1 +2,42,0,4,4370,2,4,3,4,1,2,3,26,1,3,2,3,2,2,1,0 +2,18,3,7,750,2,1,4,2,1,1,4,27,3,3,1,1,1,1,1,0 +3,15,3,6,1308,2,5,4,4,1,4,2,38,3,3,2,2,1,1,1,1 +1,15,3,7,4623,3,3,3,4,1,2,3,40,3,3,1,4,1,2,1,0 +1,24,1,4,1851,2,4,4,3,3,2,2,33,3,3,2,3,1,2,1,1 +2,18,1,4,1880,2,4,4,3,1,1,3,32,3,3,2,4,1,2,1,1 +1,36,0,10,7980,1,2,4,4,1,4,2,27,3,2,2,3,1,2,1,0 +2,30,2,3,4583,2,3,2,1,3,2,4,32,3,3,2,3,1,1,1,1 +1,12,3,1,1386,4,3,2,2,1,2,3,26,3,3,1,3,1,1,1,0 +4,24,3,1,947,2,4,4,4,1,3,1,38,1,1,1,3,2,1,1,0 +2,12,3,7,684,2,3,4,4,1,4,2,40,3,2,1,2,2,1,1,0 +2,48,3,7,7476,2,4,4,4,1,1,1,50,3,1,1,4,1,2,1,1 
+3,12,3,3,1922,2,3,4,4,1,2,3,37,3,3,1,2,1,1,1,0 +2,24,3,1,2303,2,5,4,4,2,1,4,45,3,3,1,3,1,1,1,0 +3,36,0,1,8086,3,5,2,4,1,4,2,42,3,3,4,4,1,2,1,0 +1,24,1,2,2346,2,4,4,4,1,3,2,35,3,3,2,3,1,2,1,1 +2,14,3,1,3973,2,1,1,4,1,4,1,22,3,1,1,3,1,1,1,1 +3,12,3,1,888,2,5,4,4,1,4,2,41,1,3,1,2,2,1,1,0 +1,48,3,4,10222,1,4,4,4,1,3,2,37,2,3,1,3,1,2,1,1 +3,30,2,10,4221,2,3,2,2,1,1,2,28,3,3,2,3,1,1,1,1 +3,18,1,3,6361,2,5,2,4,1,1,1,41,3,3,1,3,1,2,1,1 +4,12,3,4,1297,2,3,3,3,1,4,4,23,3,2,1,3,1,1,1,1 +2,12,3,1,900,1,3,4,3,1,2,2,23,3,3,1,3,1,1,1,0 +1,21,3,3,2241,2,5,4,4,1,2,4,50,3,3,2,3,1,1,1,1 +3,6,0,3,1050,2,1,4,4,1,1,3,35,2,3,2,4,1,2,1,1 +4,6,1,7,1047,2,3,2,2,1,4,3,50,3,3,1,2,1,1,1,1 +1,24,1,0,6314,2,1,4,4,2,2,1,27,1,3,2,4,1,2,1,1 +3,30,4,3,3496,5,3,4,4,1,2,2,34,2,3,1,3,2,2,1,1 +1,48,4,10,3609,2,3,1,2,1,1,4,27,2,3,1,3,1,1,1,1 +2,12,1,1,4843,2,5,3,4,2,4,3,43,3,2,2,3,1,2,1,0 +4,30,1,4,3017,2,5,4,4,1,4,3,47,3,3,1,3,1,1,1,1 +1,24,1,10,4139,3,3,3,4,1,3,3,27,3,3,2,2,1,2,1,1 +1,36,3,10,5742,3,4,2,4,1,2,2,31,3,3,2,3,1,2,1,1 +1,60,3,1,10366,2,5,2,4,1,4,3,42,3,3,1,4,1,2,1,1 +1,6,1,1,2080,4,3,1,3,1,2,2,24,3,3,1,3,1,1,1,1 +1,21,0,10,2580,4,2,4,4,1,2,4,41,1,3,1,2,2,1,1,0 +1,30,1,4,4530,2,4,4,2,1,4,2,26,3,2,1,4,1,2,1,1 +1,24,1,3,5150,2,5,4,4,1,4,2,33,3,3,1,3,1,2,1,1 +3,72,3,4,5595,3,3,2,3,1,2,2,24,3,3,1,3,1,1,1,0 +2,24,3,4,2384,2,5,4,4,1,4,4,64,1,2,1,2,1,1,1,1 +1,18,3,4,1453,2,2,3,2,1,1,4,26,3,3,1,3,1,1,1,1 +1,6,3,7,1538,2,2,1,2,1,2,1,56,3,3,1,3,1,1,1,1 +1,12,3,4,2279,1,3,4,4,1,4,1,37,3,1,1,3,1,2,1,1 +1,15,0,4,1478,2,3,4,3,1,3,4,33,1,3,2,3,1,1,1,1 +1,24,1,4,5103,2,2,3,3,1,3,1,47,3,1,3,3,1,2,1,1 +3,36,0,10,9857,3,4,1,4,1,3,3,31,3,3,2,2,2,2,1,1 +1,60,3,1,6527,1,3,4,4,1,4,1,34,3,1,1,3,2,2,1,1 +4,10,1,4,1347,1,4,4,4,1,2,3,27,3,3,2,3,1,2,1,1 +3,36,0,1,2862,3,5,4,4,1,3,1,30,3,1,1,3,1,1,1,1 +1,9,3,4,2753,3,5,3,4,2,4,2,35,3,3,1,3,1,2,1,1 +2,12,3,1,3651,5,3,1,4,1,3,3,31,3,3,1,3,2,1,1,1 +2,15,1,3,975,2,3,2,1,1,3,3,25,3,3,2,3,1,1,1,1 +3,15,3,6,2631,3,3,3,2,1,2,4,25,3,3,1,2,1,1,1,1 
+3,24,3,4,2896,3,2,2,4,1,1,2,29,3,3,1,3,1,1,1,1 +2,6,1,1,4716,1,2,1,4,1,3,4,44,3,3,2,2,2,1,1,1 +1,24,3,4,2284,2,4,4,4,1,2,2,28,3,3,1,3,1,2,1,1 +1,6,3,2,1236,4,3,2,4,1,4,3,50,3,2,1,3,1,1,1,1 +3,12,3,4,1103,2,4,4,4,3,3,4,29,3,3,2,3,1,1,2,1 +1,12,1,1,926,2,1,1,2,1,2,3,38,3,3,1,1,1,1,1,1 +1,18,1,4,1800,2,3,4,4,1,2,2,24,3,3,2,3,1,1,1,1 +4,15,3,7,1905,2,5,4,4,1,4,2,40,3,2,1,4,1,2,1,1 +1,12,3,3,1123,4,3,4,2,1,4,2,29,3,2,1,2,1,1,1,0 +2,48,1,2,6331,2,5,4,4,1,4,1,46,3,1,2,3,1,2,1,0 +4,24,3,4,1377,3,5,4,2,1,2,1,47,3,1,1,3,1,2,1,1 +3,30,0,10,2503,3,5,4,4,1,2,3,41,2,3,2,3,1,1,1,1 +3,27,3,10,2528,2,2,4,2,1,1,3,32,3,3,1,3,2,2,1,1 +1,15,3,1,5324,4,5,1,2,1,4,1,35,3,1,1,3,1,1,1,1 +3,48,3,1,6560,3,4,3,4,1,2,3,24,3,3,1,3,1,1,1,0 +3,12,2,3,2969,2,2,4,2,1,3,3,25,3,2,2,3,1,1,1,0 +3,9,3,4,1206,2,5,4,2,1,4,4,25,3,3,1,3,1,1,1,1 +3,9,3,4,2118,2,3,2,4,1,2,4,37,3,3,1,2,2,1,1,1 +1,18,1,4,629,4,5,4,4,1,3,3,32,1,3,2,4,1,2,1,1 +2,6,4,7,1198,2,5,4,2,1,4,1,35,3,1,1,3,1,1,1,0 +1,21,3,2,2476,1,5,4,4,1,4,4,46,3,3,1,4,1,2,1,1 +2,9,1,4,1138,2,3,4,4,1,4,4,25,3,3,2,2,1,1,1,1 +3,60,3,1,14027,2,4,4,4,1,2,1,27,3,3,1,4,1,2,1,0 +1,30,1,2,7596,1,5,1,4,1,4,2,63,3,3,2,3,1,1,1,1 +1,30,1,4,3077,1,5,3,4,1,2,2,40,3,3,2,3,2,2,1,1 +1,18,3,4,1505,2,3,4,4,1,2,1,32,3,1,1,4,1,2,1,1 +4,24,1,4,3148,1,3,3,4,1,2,2,31,3,3,2,3,1,2,1,1 +3,20,2,2,6148,3,5,3,3,1,4,2,31,1,3,2,3,1,2,1,1 +4,9,2,4,1337,2,2,4,4,1,2,2,34,3,3,2,4,1,2,1,0 +3,6,4,7,433,5,2,4,2,1,2,3,24,1,2,1,3,2,1,1,0 +2,12,3,1,1228,2,3,4,2,1,2,4,24,3,3,1,2,1,1,1,0 +3,9,3,4,790,4,3,4,2,1,3,4,66,3,3,1,2,1,1,1,1 +1,27,3,1,2570,2,3,3,2,1,3,4,21,3,2,1,3,1,1,1,0 +1,6,1,1,250,5,3,2,2,1,2,4,41,1,3,2,2,1,1,1,1 +1,15,1,4,1316,4,3,2,3,1,2,3,47,3,3,2,2,1,1,1,1 +2,18,3,4,1882,2,3,4,2,1,4,2,25,1,2,2,3,1,1,1,0 +3,48,4,10,6416,2,5,4,2,1,3,1,59,3,2,1,3,1,1,1,0 +4,24,1,10,1275,5,3,2,1,1,4,4,36,3,3,2,3,1,2,1,1 +3,24,0,4,6403,2,2,1,4,1,2,2,33,3,3,1,3,1,1,1,1 +2,24,3,4,1987,2,3,2,4,1,4,4,21,3,2,1,2,2,1,1,0 +3,8,3,4,760,2,4,4,2,3,2,4,44,3,3,1,2,1,1,1,1 
+1,24,3,2,2603,5,3,2,2,1,4,2,28,3,2,1,3,1,2,1,1 +1,4,1,1,3380,2,4,1,2,1,1,4,37,3,3,1,3,2,1,1,1 +3,36,4,5,3990,1,2,3,2,1,2,1,29,1,3,1,1,1,1,1,1 +3,24,3,2,11560,2,3,1,2,1,4,2,23,3,2,2,4,1,1,1,0 +2,18,3,1,4380,3,3,3,4,1,4,2,35,3,3,1,2,2,2,1,1 +1,6,1,1,6761,2,4,1,4,1,3,1,45,3,3,2,4,2,2,1,1 +3,30,2,10,4280,3,3,4,2,1,4,2,26,3,2,2,2,1,1,1,0 +2,24,4,1,2325,3,4,2,4,1,3,2,32,1,3,1,3,1,1,1,1 +3,10,4,4,1048,2,3,4,4,1,4,4,23,2,3,1,2,1,1,1,1 +1,21,3,4,3160,1,5,4,4,1,3,3,41,3,3,1,3,1,2,1,1 +2,24,4,3,2483,4,3,4,4,1,4,4,22,2,3,1,3,1,2,1,1 +2,39,1,3,14179,1,4,4,4,1,4,3,30,3,3,2,4,1,2,1,1 +2,13,1,10,1797,2,2,3,4,1,1,3,28,1,3,2,2,1,1,1,1 +2,15,3,1,2511,2,1,1,2,1,4,2,23,3,2,1,3,1,1,1,1 +2,12,3,1,1274,2,2,3,2,1,1,4,37,3,3,1,2,1,1,1,0 +1,21,3,2,5248,1,3,1,4,1,3,2,26,3,3,1,3,1,1,1,1 +1,15,3,2,3029,2,4,2,4,1,2,2,33,3,3,1,3,1,1,1,1 +2,6,3,3,428,2,5,2,2,1,1,3,49,1,3,1,3,1,2,1,1 +2,18,3,1,976,2,2,1,2,1,2,2,23,3,3,1,2,1,1,1,0 +3,12,3,10,841,3,4,2,2,1,4,4,23,3,2,1,2,1,1,1,1 +1,30,1,4,5771,2,4,4,2,1,2,2,25,3,3,2,3,1,1,1,1 +1,12,0,6,1555,5,5,4,4,1,4,1,55,3,1,2,3,2,1,1,0 +2,24,3,1,1285,1,4,4,2,1,4,1,32,3,2,1,3,1,1,1,0 +4,6,1,1,1299,2,3,1,4,1,1,4,74,3,3,3,1,2,1,2,1 +4,15,1,4,1271,1,3,3,4,1,4,1,39,3,1,2,3,1,2,1,0 +1,24,3,1,1393,2,3,2,4,3,2,4,31,3,3,1,3,1,2,1,1 +2,12,1,1,691,2,5,4,4,1,3,3,35,3,3,2,3,1,1,1,0 +1,15,1,1,5045,1,5,1,2,1,4,2,59,3,3,1,3,1,2,1,1 +2,18,1,3,2124,2,3,4,2,1,4,4,24,3,2,2,3,1,1,1,0 +2,12,3,4,2214,2,3,4,4,1,3,3,24,3,3,1,2,1,1,1,1 +1,21,1,1,12680,1,5,4,4,1,4,1,30,3,1,1,4,1,2,1,0 +1,24,1,1,2463,3,4,4,3,1,3,3,27,3,3,2,3,1,2,1,1 +3,12,3,4,1155,2,5,3,3,3,3,4,40,1,3,2,2,1,1,1,1 +2,30,3,3,3108,2,2,2,1,1,4,3,31,3,3,1,2,1,1,1,0 +1,10,3,2,2901,1,2,1,2,1,4,4,31,3,2,1,3,1,1,1,1 +3,12,1,3,3617,2,5,1,4,1,4,2,28,3,2,3,3,1,2,1,1 +1,12,1,4,1655,2,5,2,4,1,4,4,63,3,3,2,2,1,2,1,1 +2,24,3,2,2812,1,5,2,2,1,4,4,26,3,2,1,3,1,1,1,1 +2,36,1,7,8065,2,3,3,2,1,2,1,25,3,3,2,4,1,2,1,0 +1,21,1,2,3275,2,5,1,4,1,4,2,36,3,3,1,4,1,2,1,1 +1,24,1,4,2223,3,5,4,4,1,4,3,52,1,3,2,3,1,1,1,1 
+4,12,1,1,1480,4,1,2,4,1,4,1,66,1,1,3,1,1,1,1,1 +2,24,3,1,1371,1,3,4,2,1,4,4,25,3,2,1,3,1,1,1,0 +1,36,1,1,3535,2,4,4,4,1,4,2,37,3,3,2,3,1,2,1,1 +2,18,3,4,3509,2,4,4,2,3,1,4,25,3,3,1,3,1,1,1,1 +1,36,1,2,5711,5,5,4,4,1,2,2,38,3,3,2,4,1,2,1,1 +3,18,3,6,3872,2,1,2,2,1,4,2,67,3,3,1,3,1,2,1,1 +3,39,1,4,4933,2,4,2,4,3,2,4,25,3,3,2,3,1,1,1,0 +1,24,1,1,1940,5,5,4,4,1,4,4,60,3,3,1,3,1,2,1,1 +3,12,2,9,1410,2,3,2,4,1,2,4,31,3,3,1,2,1,2,1,1 +3,12,3,1,836,3,2,4,2,1,2,3,23,1,3,1,2,1,1,1,0 +3,20,3,2,6468,1,1,1,1,1,4,4,60,3,3,1,4,1,2,1,1 +3,18,3,10,1941,5,3,4,4,1,2,3,35,3,3,1,2,1,2,1,1 +1,22,3,4,2675,4,5,3,4,1,4,2,40,3,3,1,3,1,1,1,1 +1,48,1,2,2751,1,5,4,4,1,3,2,38,3,3,2,3,2,2,1,1 +3,48,0,7,6224,2,5,4,4,1,4,1,50,3,1,1,3,1,1,1,0 +2,40,1,7,5998,2,3,4,4,1,3,1,27,1,3,1,3,1,2,1,0 +3,21,3,10,1188,2,5,2,2,1,4,3,39,3,3,1,3,2,1,1,0 +1,24,3,2,6313,1,5,3,4,1,4,2,41,3,3,1,4,2,2,1,1 +1,6,1,3,1221,1,3,1,3,1,2,3,27,3,3,2,3,1,1,1,1 +4,24,3,3,2892,2,5,3,1,1,4,1,51,3,1,1,3,1,1,1,1 +1,24,3,3,3062,4,5,4,4,1,3,1,32,3,2,1,3,1,2,1,1 +1,9,3,3,2301,3,2,2,2,1,4,3,22,3,2,1,3,1,1,1,1 +2,18,3,2,7511,1,5,1,4,1,4,3,51,3,1,1,3,2,2,1,0 +1,12,1,3,1258,2,2,2,2,1,4,3,22,3,2,2,2,1,1,1,1 +1,24,0,1,717,1,5,4,3,1,4,2,54,3,3,2,3,1,2,1,1 +3,9,3,1,1549,1,2,4,4,1,2,4,35,3,3,1,1,1,1,1,1 +1,24,1,7,1597,2,5,4,4,1,4,1,54,3,1,2,3,2,1,1,1 +3,18,1,4,1795,2,5,3,2,3,4,4,48,1,2,2,2,1,2,1,1 +2,20,1,3,4272,2,5,1,2,1,4,3,24,3,3,2,3,1,1,1,1 +1,12,1,4,976,1,5,4,4,1,4,2,35,3,3,2,3,1,1,1,1 +3,12,3,1,7472,1,1,1,2,1,2,4,24,3,2,1,1,1,1,1,1 +2,36,3,1,9271,2,4,2,4,1,1,2,24,3,3,1,3,1,2,1,0 +3,6,3,4,590,2,2,3,3,1,3,4,26,3,3,1,2,1,1,2,1 +1,12,1,4,930,1,5,4,4,1,4,4,65,3,3,4,3,1,1,1,1 +3,42,4,2,9283,2,1,1,4,1,2,1,55,1,1,1,4,1,2,1,1 +3,15,2,1,1778,2,2,2,2,1,1,4,26,3,2,2,1,1,1,1,0 +3,8,3,10,907,2,2,3,3,1,2,4,26,3,3,1,3,1,2,1,1 +3,6,3,4,484,2,4,3,3,3,3,4,28,1,3,1,2,1,1,1,1 +2,36,1,2,9629,2,4,4,4,1,4,2,24,3,3,2,3,1,2,1,0 +2,48,3,5,3051,2,3,3,4,1,4,2,54,3,3,1,3,1,1,1,0 +2,48,3,1,3931,2,4,4,4,1,4,1,46,3,1,1,3,2,1,1,0 
+3,36,0,1,7432,2,3,2,2,1,2,3,54,3,2,1,3,1,1,1,1 +1,6,3,5,1338,4,3,1,1,1,4,4,62,3,3,1,3,1,1,1,1 +1,6,1,4,1554,2,4,1,2,1,2,2,24,3,2,2,3,1,2,1,1 +2,36,3,0,15857,2,1,2,1,2,3,2,43,3,3,1,4,1,1,1,1 +2,18,3,4,1345,2,3,4,3,1,3,4,26,1,3,1,3,1,1,1,0 +1,12,3,1,1101,2,3,3,3,1,2,4,27,3,3,2,3,1,2,1,1 +4,12,3,4,3016,2,3,3,3,1,1,2,24,3,3,1,3,1,1,1,1 +2,36,3,3,2712,2,5,2,4,1,2,3,41,1,3,1,3,2,1,1,0 +2,8,1,1,731,2,5,4,4,1,4,4,47,3,3,2,2,1,1,1,1 +1,18,1,3,3780,2,2,3,1,1,2,2,35,3,3,2,4,1,2,1,1 +2,21,1,1,1602,2,5,4,3,1,3,2,30,3,3,2,3,1,2,1,1 +2,18,1,1,3966,2,5,1,2,1,4,4,33,1,2,3,3,1,2,1,0 +1,18,2,10,4165,2,3,2,4,1,2,2,36,2,3,2,3,2,1,1,0 +2,36,3,2,8335,1,5,3,4,1,4,1,47,3,1,1,3,1,1,1,0 +3,48,0,10,6681,1,3,4,4,1,4,1,38,3,1,1,3,2,2,1,1 +1,24,0,10,2375,4,3,4,4,1,2,2,44,3,3,2,3,2,2,1,1 +2,18,3,1,1216,2,2,4,2,1,3,2,23,3,2,1,3,1,2,1,0 +2,45,2,10,11816,2,5,2,4,1,4,2,29,3,2,2,3,1,1,1,0 +3,24,3,4,5084,1,5,2,2,1,4,2,42,3,3,1,3,1,2,1,1 +4,15,3,4,2327,2,2,2,2,1,3,4,25,3,3,1,2,1,1,1,0 +2,12,2,1,1082,2,3,4,4,1,4,2,48,1,3,2,3,1,1,1,0 +1,12,3,4,886,1,3,4,2,1,2,2,21,3,3,1,3,1,1,1,1 +1,4,3,3,601,2,2,1,2,1,3,4,23,3,2,1,2,2,1,1,1 +2,24,1,2,2957,2,5,4,4,1,4,3,63,3,3,2,3,1,2,1,1 +1,24,1,4,2611,2,5,4,3,2,3,4,46,3,3,2,3,1,1,1,1 +2,36,3,3,5179,2,4,4,4,1,2,3,29,3,3,1,3,1,1,1,0 +1,21,0,2,2993,2,3,3,4,1,2,4,28,2,3,2,2,1,1,1,1 +1,18,3,6,1943,2,2,4,2,1,4,4,23,3,3,1,3,1,1,1,0 +1,24,4,10,1559,2,4,4,4,1,4,2,50,1,3,1,3,1,2,1,1 +1,18,3,3,3422,2,5,4,4,1,4,3,47,1,3,3,3,2,2,1,1 +3,21,3,3,3976,1,4,2,4,1,3,2,35,3,3,1,3,1,2,1,1 +1,18,3,1,6761,1,3,2,4,1,4,2,68,3,2,2,3,1,1,1,0 +1,24,3,1,1249,2,2,4,3,1,2,4,28,3,3,1,3,1,1,1,1 +2,9,3,4,1364,2,4,3,4,1,4,4,59,3,3,1,3,1,1,1,1 +2,12,3,4,709,2,5,4,4,1,4,4,57,2,3,1,2,1,1,1,0 +2,20,1,1,2235,2,3,4,3,3,2,3,33,1,2,2,3,1,1,2,0 +1,24,1,2,4042,1,4,3,4,1,4,3,43,3,3,2,3,1,2,1,1 +1,15,1,4,1471,2,3,4,4,1,4,1,35,3,1,2,3,1,2,1,1 +2,18,4,1,1442,2,4,4,4,1,4,1,32,3,1,2,2,2,1,1,0 +1,36,0,1,10875,2,5,2,4,1,2,2,45,3,3,2,3,2,2,1,1 +1,24,3,1,1474,3,2,4,3,1,3,4,33,3,3,1,3,1,2,1,1 
+1,10,3,9,894,1,4,4,2,1,3,3,40,3,3,1,3,1,2,1,1 +1,15,1,3,3343,2,3,4,4,1,2,1,28,3,1,1,3,1,2,1,1 +2,15,3,1,3959,2,3,3,2,1,2,3,29,3,3,1,3,1,2,1,0 +1,9,3,1,3577,3,3,1,4,3,2,4,26,3,2,1,3,2,1,2,1 +1,24,1,2,5804,5,3,4,4,1,2,4,27,3,3,2,3,1,1,1,1 +1,18,0,10,2169,2,3,4,3,1,2,2,28,3,3,1,3,1,2,1,0 +2,24,3,4,2439,2,2,4,2,1,4,4,35,3,3,1,3,1,2,1,0 +1,27,1,3,4526,5,2,4,4,1,2,4,32,2,3,2,2,2,2,1,1 +1,10,3,3,2210,2,3,2,4,1,2,4,25,1,2,1,2,1,1,1,0 +1,15,3,3,2221,4,3,2,2,1,4,2,20,3,2,1,3,1,1,1,1 +2,18,3,4,2389,2,2,4,2,1,1,2,27,2,3,1,3,1,1,1,1 +1,12,1,3,3331,2,5,2,4,1,4,3,42,2,3,1,3,1,1,1,1 +1,36,3,10,7409,1,5,3,4,1,2,3,37,3,3,2,3,1,1,1,1 +2,12,3,3,652,2,5,4,2,1,4,3,24,3,2,1,3,1,1,1,1 +1,36,0,3,7678,4,4,2,2,1,4,2,40,3,3,2,3,1,2,1,1 +4,6,1,1,1343,2,5,1,4,1,4,4,46,3,3,2,3,2,1,2,1 +2,24,1,10,1382,3,4,4,4,1,1,4,26,3,3,2,3,1,2,1,1 +1,15,3,5,874,1,2,4,2,1,1,4,24,3,3,1,3,1,1,1,1 +2,12,3,3,3590,2,3,2,4,2,2,3,29,3,3,1,2,2,1,1,1 +3,11,1,1,1322,5,3,4,2,1,4,2,40,3,3,2,3,1,1,1,1 +2,18,4,4,1940,2,2,3,4,2,4,1,36,1,1,1,4,1,2,1,1 +1,36,3,4,3595,2,5,4,4,1,2,2,28,3,3,1,3,1,1,1,1 +2,9,3,1,1422,2,2,3,4,1,2,1,27,3,1,1,4,1,2,1,0 +1,30,1,4,6742,1,4,2,4,1,3,3,36,3,3,2,3,1,1,1,1 +1,24,3,2,7814,2,4,3,4,1,3,2,38,3,3,1,4,1,2,1,1 +1,24,3,2,9277,1,3,2,1,1,4,1,48,3,1,1,3,1,2,1,1 +3,30,1,1,2181,1,5,4,4,1,4,4,36,3,3,2,3,1,1,1,1 +1,18,1,4,1098,2,1,4,2,1,4,2,65,3,3,2,1,1,1,1,1 +3,24,3,3,4057,2,4,3,1,1,3,2,43,3,3,1,3,1,2,1,0 +2,12,3,7,795,2,2,4,2,1,4,3,53,3,3,1,3,1,1,1,0 +3,24,1,10,2825,1,4,4,4,1,3,1,34,3,3,2,3,2,2,1,1 +3,48,3,10,15672,2,3,2,4,1,2,2,23,3,3,1,3,1,2,1,0 +1,36,1,1,6614,2,5,4,4,1,4,2,34,3,3,2,4,1,2,1,1 +1,28,4,2,7824,1,2,3,4,3,4,4,40,1,2,2,3,2,2,1,1 +2,27,1,10,2442,2,5,4,4,1,4,2,43,2,3,4,4,2,2,1,1 +1,15,1,4,1829,2,5,4,4,1,4,2,46,3,3,2,3,1,2,1,1 +2,12,1,1,2171,2,3,4,4,1,4,3,38,1,3,2,2,1,1,2,1 +3,36,1,2,5800,2,3,3,4,1,4,2,34,3,3,2,3,1,2,1,1 +1,18,1,4,1169,1,3,4,4,1,3,3,29,3,3,2,3,1,2,1,1 +1,36,0,2,8947,1,4,3,4,1,2,2,31,2,3,1,4,2,2,1,1 +2,21,3,4,2606,2,2,4,2,1,4,3,28,3,2,1,4,1,2,1,1 
+1,12,1,3,1592,5,4,3,2,1,2,3,35,3,3,1,3,1,1,2,1 +1,15,3,3,2186,1,4,1,2,1,4,4,33,1,2,1,2,1,1,1,1 +2,18,3,3,4153,2,3,2,4,2,3,2,42,3,3,1,3,1,1,1,0 +2,16,1,1,2625,2,5,2,4,3,4,3,43,1,2,1,3,1,2,1,0 +1,20,1,1,3485,1,2,2,1,1,4,4,44,3,3,2,3,1,2,1,1 +1,36,1,2,10477,1,5,2,4,1,4,1,42,3,1,2,3,1,1,1,1 +1,15,3,4,1386,1,3,4,3,1,2,4,40,3,2,1,3,1,2,1,1 +1,24,3,4,1278,2,5,4,4,1,1,4,36,3,3,1,4,1,2,1,1 +2,12,3,4,1107,2,3,2,4,1,2,4,20,3,2,1,4,2,2,1,1 +2,21,3,1,3763,1,4,2,4,2,2,4,24,3,3,1,2,1,1,2,1 +3,36,3,7,3711,1,3,2,3,1,2,2,27,3,3,1,3,1,1,1,1 +1,15,0,2,3594,2,2,1,2,1,2,3,46,3,3,2,2,1,1,1,1 +3,9,3,1,3195,1,3,1,2,1,2,4,33,3,3,1,2,1,1,1,1 +1,36,0,4,4454,2,3,4,2,1,4,4,34,3,3,2,3,1,1,1,1 +3,24,1,3,4736,2,2,2,2,1,4,2,25,1,3,1,2,1,1,1,0 +3,30,3,4,2991,1,5,2,2,1,4,2,25,3,3,1,3,1,1,1,1 +1,11,3,10,2142,5,5,1,1,1,2,4,28,3,3,1,3,1,2,1,1 +2,24,4,10,3161,2,3,4,4,1,2,3,31,3,2,1,3,1,2,1,0 +3,48,2,0,18424,2,3,1,2,1,2,3,32,1,3,1,4,1,2,2,0 +1,10,3,2,2848,3,3,1,4,2,2,4,32,3,3,1,3,2,1,1,1 +2,6,3,1,14896,2,5,1,4,1,4,1,68,1,3,1,4,1,2,1,0 +2,24,3,3,2359,3,1,1,1,1,1,3,33,3,3,1,3,1,1,1,0 +2,24,3,3,3345,2,5,4,4,1,2,3,39,3,2,1,4,1,2,1,0 +1,18,1,3,1817,2,3,4,2,1,2,1,28,3,3,2,3,1,1,1,1 +1,48,0,4,12749,4,4,4,4,1,1,2,37,3,3,1,4,1,2,1,1 +2,9,3,4,1366,2,2,3,2,1,4,3,22,3,2,1,3,1,1,1,0 +3,12,3,1,2002,2,4,3,4,1,4,3,30,3,2,1,3,2,2,1,1 +2,24,4,3,6872,2,2,2,1,1,1,3,55,1,3,1,3,1,2,1,0 +2,12,4,1,697,2,2,4,4,1,2,2,46,1,3,2,3,1,2,1,0 +2,18,1,3,1049,2,2,4,2,1,4,3,21,3,2,1,3,1,1,1,1 +2,48,3,2,10297,2,4,4,4,1,4,1,39,2,1,3,3,2,2,1,0 +1,30,3,4,1867,1,5,4,4,1,4,2,58,3,3,1,3,1,2,1,1 +2,12,0,1,1344,2,3,4,4,1,2,4,43,3,3,2,2,2,1,1,1 +2,24,3,3,1747,2,2,4,4,2,1,3,24,3,3,1,2,1,1,2,1 +3,9,3,4,1670,2,2,4,2,1,2,2,22,3,3,1,3,1,2,1,0 +1,9,1,1,1224,2,3,3,4,1,1,4,30,3,3,2,3,1,1,1,1 +1,12,1,4,522,4,5,4,4,1,4,3,42,3,3,2,3,2,2,1,1 +2,12,3,4,1498,2,3,4,2,1,1,2,23,1,3,1,3,1,1,1,1 +3,30,0,4,1919,3,2,4,4,1,3,1,30,2,3,2,4,1,1,1,0 +4,9,3,4,745,2,3,3,2,1,2,4,28,3,3,1,2,1,1,1,0 +3,6,3,4,2063,2,2,4,3,1,3,2,30,3,2,1,4,1,2,1,1 
+3,60,3,7,6288,2,3,4,4,1,4,1,42,3,1,1,3,1,1,1,0 +1,24,1,2,6842,1,3,2,4,1,4,3,46,3,3,2,4,2,2,1,1 +1,12,3,1,3527,1,2,2,4,1,3,3,45,3,3,1,4,2,2,1,1 +1,10,3,1,1546,2,3,3,4,1,2,4,31,3,3,1,2,2,1,2,1 +1,24,3,3,929,1,4,4,4,1,2,2,31,2,3,1,3,1,2,1,1 +1,4,1,1,1455,2,4,2,4,1,1,4,42,3,3,3,2,2,1,1,1 +2,15,3,3,1845,2,2,4,2,3,1,3,46,3,2,1,3,1,1,1,1 +3,48,2,1,8358,4,2,1,2,1,1,2,30,3,3,2,3,1,1,1,1 +2,24,4,3,3349,4,2,4,4,1,4,1,30,3,1,1,3,2,2,1,0 +1,12,3,1,2859,1,1,4,4,1,4,1,38,3,3,1,4,1,2,1,1 +1,18,3,3,1533,2,2,4,3,2,1,3,43,3,3,1,2,2,1,1,0 +1,24,3,4,3621,3,5,2,4,1,4,2,31,3,3,2,3,1,1,1,0 +3,18,1,10,3590,2,1,3,3,1,3,2,40,3,3,3,1,2,2,1,1 +2,36,0,10,2145,2,4,2,4,1,1,2,24,3,3,2,3,1,2,1,0 +3,24,3,2,4113,4,2,3,2,1,4,2,28,3,2,1,3,1,1,1,0 +1,36,3,3,10974,2,1,4,2,1,2,2,26,3,3,2,4,1,2,1,0 +2,12,3,1,1893,2,3,4,2,3,4,3,29,3,3,1,3,1,2,1,1 +2,24,1,4,1231,5,5,4,2,1,4,3,57,3,2,2,4,1,2,1,1 +4,30,1,4,3656,1,5,4,4,1,4,3,49,2,3,2,2,1,1,1,1 +3,9,1,4,1154,2,5,2,4,1,4,4,37,3,3,3,2,1,1,1,1 +2,28,3,1,4006,2,3,3,4,1,2,2,45,3,3,1,2,1,1,1,0 +3,24,3,3,3069,3,5,4,4,1,4,1,30,3,1,1,3,1,1,1,1 +1,6,1,4,1740,2,5,2,3,1,2,4,30,3,2,2,3,1,1,1,1 +3,21,0,1,2353,2,3,1,1,1,4,3,47,3,3,2,3,1,1,1,1 +1,15,3,1,3556,1,3,3,4,1,2,1,29,3,3,1,3,1,1,1,1 +1,24,3,4,2397,4,5,3,4,1,2,2,35,1,3,2,3,1,2,1,0 +3,6,3,6,454,2,2,3,3,1,1,3,22,3,3,1,2,1,1,1,1 +3,30,3,4,1715,1,3,4,2,1,1,2,26,3,3,1,3,1,1,1,1 +3,27,1,4,2520,4,3,4,4,1,2,3,23,3,3,2,2,1,1,1,0 +1,15,3,4,3568,2,5,4,2,1,2,2,54,1,2,1,4,1,2,1,1 +1,42,3,4,7166,1,4,2,3,1,4,3,29,3,2,1,3,1,2,1,1 +2,11,1,1,3939,2,3,1,4,1,2,4,40,3,3,2,2,2,1,1,1 +3,15,3,6,1514,3,3,4,4,3,2,4,22,3,3,1,3,1,1,1,1 +1,24,3,1,7393,2,3,1,4,1,4,3,43,3,3,1,2,2,1,1,1 +2,24,4,1,1193,2,1,1,2,2,4,1,29,3,2,2,1,1,1,1,0 +2,60,3,10,7297,2,5,4,4,2,4,1,36,3,2,1,3,1,1,1,0 +1,30,1,4,2831,2,3,4,2,1,2,2,33,3,3,1,3,1,2,1,1 +4,24,3,4,1258,4,3,3,2,1,3,2,57,3,3,1,2,1,1,1,1 +3,6,3,4,753,2,3,2,2,3,3,4,64,3,3,1,3,1,1,1,1 +3,18,0,10,2427,1,5,4,4,1,2,3,42,3,3,2,3,1,1,1,1 +1,24,0,1,2538,2,5,4,4,1,4,2,47,3,3,2,2,2,1,1,0 
+3,15,4,1,1264,3,3,2,3,1,2,3,25,3,2,1,3,1,1,1,0 +3,30,1,3,8386,2,4,2,4,1,2,3,49,3,3,1,3,1,1,1,0 +1,48,3,10,4844,2,1,3,4,1,2,2,33,1,2,1,4,1,2,1,0 +4,21,3,1,2923,3,3,1,2,1,1,2,28,1,3,1,4,1,2,1,1 +2,36,3,2,8229,2,3,2,4,1,2,3,26,3,3,1,3,2,1,1,0 +1,24,1,3,2028,2,4,2,4,1,2,3,30,3,3,2,2,1,1,1,1 +2,15,1,3,1433,2,3,4,2,1,3,3,25,3,2,2,3,1,1,1,1 +4,42,2,10,6289,2,2,2,1,1,1,3,33,3,3,2,3,1,1,1,1 +1,13,3,4,1409,3,1,2,2,1,4,4,64,3,3,1,3,1,1,1,1 +2,24,3,2,6579,2,1,4,4,1,2,1,29,3,1,1,4,1,2,1,1 +3,24,1,4,1743,2,5,4,4,1,2,3,48,3,3,2,2,1,1,1,1 +1,12,1,7,3565,1,2,2,4,1,1,3,37,3,3,2,2,2,1,1,1 +1,15,4,4,1569,3,5,4,4,1,4,2,34,1,3,1,2,2,1,1,1 +2,18,3,4,1936,1,4,2,3,1,4,2,23,3,2,2,2,1,1,1,1 +2,36,3,3,3959,2,1,4,4,1,3,3,30,3,3,1,4,1,2,1,1 +1,12,3,1,2390,1,5,4,4,1,3,2,50,3,3,1,3,1,2,1,1 +1,12,3,3,1736,2,4,3,2,1,4,4,31,3,3,1,2,1,1,1,1 +2,30,3,2,3857,2,3,4,1,1,4,3,40,3,3,1,4,1,2,1,1 +1,12,3,4,804,2,5,4,4,1,4,2,38,3,3,1,3,1,1,1,1 +2,45,3,4,1845,2,3,4,4,1,4,1,23,3,1,1,3,1,2,1,0 +3,45,1,2,4576,3,1,3,4,1,4,2,27,3,3,1,3,1,1,1,1 diff --git a/methods/catalog/rbr/library/reproduce/data/german_modified.csv b/methods/catalog/rbr/library/reproduce/data/german_modified.csv new file mode 100644 index 0000000..58c85b8 --- /dev/null +++ b/methods/catalog/rbr/library/reproduce/data/german_modified.csv @@ -0,0 +1,1001 @@ +status,duration,credit_history,purpose,amount,savings,employment_duration,installment_rate,personal_status_sex,other_debtors,present_residence,property,age,other_installment_plans,housing,number_credits,job,people_liable,telephone,foreign_worker,credit_risk +1,18,4,2,1049,1,2,4,2,1,4,2,21,3,1,1,3,2,1,2,1 +1,9,4,0,2799,1,3,2,3,1,2,1,36,3,1,2,3,1,1,2,1 +2,12,2,9,841,2,4,2,2,1,4,1,23,3,1,1,2,2,1,2,1 +1,12,4,0,2122,1,3,3,3,1,2,1,39,3,1,2,2,1,1,1,1 +1,12,4,0,2171,1,3,4,3,1,4,2,38,1,2,2,2,2,1,1,1 +1,10,4,0,2241,1,2,1,3,1,3,1,48,3,1,2,2,1,1,1,1 +1,8,4,0,3398,1,4,1,3,1,4,1,39,3,2,2,2,2,1,1,1 +1,6,4,0,1361,1,2,2,3,1,4,1,40,3,2,1,2,1,1,1,1 +4,18,4,3,1098,1,1,4,2,1,4,3,65,3,2,2,1,2,1,2,1 
+2,24,2,3,3758,3,1,1,2,1,4,4,23,3,1,1,1,2,1,2,1 +1,11,4,0,3905,1,3,2,3,1,2,1,36,3,1,2,3,1,1,2,1 +1,30,4,1,6187,2,4,1,4,1,4,3,24,3,1,2,3,2,1,2,1 +1,6,4,3,1957,1,4,1,2,1,4,3,31,3,2,1,3,2,1,2,1 +2,48,3,10,7582,2,1,2,3,1,4,4,31,3,2,1,4,2,2,2,1 +1,18,2,3,1936,5,4,2,4,1,4,3,23,3,1,2,2,2,1,2,1 +1,6,2,3,2647,3,3,2,3,1,3,1,44,3,1,1,3,1,1,2,1 +1,11,4,0,3939,1,3,1,3,1,2,1,40,3,2,2,2,1,1,2,1 +2,18,2,3,3213,3,2,1,4,1,3,1,25,3,1,1,3,2,1,2,1 +2,36,4,3,2337,1,5,4,3,1,4,1,36,3,2,1,3,2,1,2,1 +4,11,4,0,7228,1,3,1,3,1,4,2,39,3,2,2,2,2,1,2,1 +1,6,4,0,3676,1,3,1,3,1,3,1,37,3,1,3,3,1,1,2,1 +2,12,4,0,3124,1,2,1,3,1,3,1,49,1,2,2,2,1,1,2,1 +2,36,2,5,2384,1,2,4,3,1,1,4,33,3,1,1,2,2,1,2,0 +2,12,4,4,1424,1,4,4,3,1,3,2,26,3,2,1,3,2,1,2,1 +1,6,4,0,4716,5,2,1,3,1,3,1,44,3,2,2,2,1,1,2,1 +2,11,3,3,4771,1,4,2,3,1,4,2,51,3,2,1,3,2,1,2,1 +1,12,2,2,652,1,5,4,2,1,4,2,24,3,1,1,3,2,1,2,1 +2,9,4,3,1154,1,5,2,3,1,4,1,37,3,2,3,2,2,1,2,1 +4,15,2,0,3556,5,3,3,3,1,2,4,29,3,2,1,3,2,1,2,1 +3,42,4,1,4796,1,5,4,3,1,4,4,56,3,3,1,3,2,1,2,1 +3,30,4,3,3017,1,5,4,3,1,4,2,47,3,2,1,3,2,1,2,1 +4,36,4,0,3535,1,4,4,3,1,4,3,37,3,2,2,3,2,2,2,1 +4,36,4,0,6614,1,5,4,3,1,4,3,34,3,2,2,4,2,2,2,1 +4,24,2,3,1376,3,4,4,2,1,1,3,28,3,2,1,3,2,1,2,1 +1,15,2,0,1721,1,2,2,3,1,3,1,36,3,2,1,3,2,1,2,1 +1,6,4,0,860,1,5,1,2,1,4,4,39,3,2,2,3,2,2,2,1 +4,12,4,0,1495,1,5,4,3,1,1,1,38,3,2,2,2,1,1,2,1 +4,12,4,3,1934,1,5,2,3,1,2,4,26,3,2,2,3,2,1,2,1 +4,18,2,1,3378,5,3,2,3,1,1,2,31,3,2,1,3,2,2,2,1 +4,24,4,1,3868,1,5,4,2,1,2,3,41,3,1,2,4,2,2,2,1 +4,12,4,5,996,5,4,4,2,1,4,1,23,3,2,2,3,2,1,2,1 +1,24,2,10,1755,1,5,4,2,3,4,1,58,3,2,1,2,2,2,2,1 +4,18,4,0,1028,1,3,4,2,1,3,1,36,3,2,2,3,2,1,2,1 +2,24,4,9,2825,5,4,4,3,1,3,4,34,3,2,2,3,1,2,2,1 +2,18,2,6,1239,5,3,4,3,1,4,4,61,3,3,1,3,2,1,2,1 +1,18,2,0,1216,1,2,4,2,1,3,3,23,3,1,1,3,2,2,2,0 +4,24,2,9,1258,1,4,4,3,1,1,1,25,3,2,1,3,2,2,2,1 +4,18,4,6,1864,2,3,4,2,1,2,1,30,3,2,2,3,2,1,2,0 +4,24,2,0,1474,2,2,4,4,1,3,1,33,3,2,1,3,2,2,2,1 +1,24,4,9,1382,2,4,4,3,1,1,1,26,3,2,2,3,2,2,2,1 
+4,12,2,0,640,1,3,4,1,1,2,1,49,3,2,1,2,2,1,2,1 +3,36,2,3,3919,1,3,2,3,1,2,1,23,3,2,1,3,2,2,2,1 +4,9,4,0,1224,1,3,3,3,1,1,1,30,3,2,2,3,2,1,2,1 +4,12,4,3,2331,5,5,1,3,2,4,1,49,3,2,1,3,2,2,2,1 +4,24,2,1,6313,5,5,3,3,1,4,3,41,3,2,1,4,1,2,2,1 +1,12,4,3,385,1,4,4,2,1,3,1,58,3,2,4,2,2,2,2,1 +4,12,4,3,1655,1,5,2,3,1,4,1,63,3,2,2,2,2,2,2,1 +1,15,2,3,1053,1,2,4,4,1,2,1,27,3,2,1,3,2,1,1,1 +4,21,2,3,3160,5,5,4,3,1,3,2,41,3,2,1,3,2,2,2,1 +4,36,2,0,3079,5,3,4,3,1,4,1,36,3,2,1,3,2,1,2,1 +4,12,4,0,1163,3,3,4,3,1,4,1,44,3,2,1,3,2,2,2,1 +4,24,2,1,2679,1,2,4,2,1,1,4,29,3,2,1,4,2,2,2,1 +4,48,4,3,3578,5,5,4,3,1,1,1,47,3,2,1,3,2,2,2,1 +4,36,3,0,10875,1,5,2,3,1,2,3,45,3,2,2,3,1,2,2,1 +1,12,3,0,1344,1,3,4,3,1,2,1,43,3,2,2,2,1,1,2,1 +4,6,4,3,1237,2,3,1,2,1,1,2,27,3,2,2,3,2,1,2,1 +4,12,2,3,3077,1,3,2,3,1,4,3,52,3,2,1,3,2,2,2,1 +4,24,2,3,2284,1,4,4,3,1,2,3,28,3,2,1,3,2,2,2,1 +2,12,2,3,1567,1,3,1,2,1,1,3,22,3,2,1,3,2,2,2,1 +4,24,3,0,2032,1,5,4,3,1,4,4,60,3,3,2,3,2,2,2,1 +2,21,4,2,2745,4,4,3,3,1,2,3,32,3,2,2,3,2,2,2,1 +4,30,2,3,1867,5,5,4,3,1,4,3,58,3,2,1,3,2,2,2,1 +4,36,2,3,2299,3,5,4,3,1,4,3,39,3,2,1,3,2,1,2,1 +4,24,2,2,929,5,4,4,3,1,2,3,31,2,2,1,3,2,2,2,1 +3,12,2,3,3399,5,5,2,3,1,3,3,37,3,2,1,4,2,1,2,1 +2,9,2,2,2030,5,4,2,3,1,1,3,24,3,2,1,3,2,2,2,1 +4,21,4,1,3275,1,5,1,3,1,4,3,36,3,2,1,4,2,2,2,1 +4,24,4,0,1940,4,5,4,3,1,4,1,60,3,2,1,3,2,2,2,1 +1,21,4,0,1602,1,5,4,4,1,3,3,30,3,2,2,3,2,2,2,1 +4,15,2,3,1979,5,5,4,3,1,2,3,35,3,2,1,3,2,1,2,1 +4,24,4,0,2022,1,3,4,2,1,4,3,37,3,2,1,3,2,2,2,1 +4,36,4,3,3342,5,5,4,3,1,2,3,51,3,2,1,3,2,2,2,1 +2,18,2,0,5866,2,3,2,3,1,2,3,30,3,2,2,3,2,2,2,1 +3,15,4,1,2360,3,3,2,3,1,2,3,36,3,2,1,3,2,2,2,1 +4,15,4,2,1520,5,5,4,3,1,4,2,63,3,2,1,3,2,1,2,1 +1,12,2,0,3651,4,3,1,3,1,3,2,31,3,2,1,3,1,1,2,1 +4,24,4,1,2346,1,4,4,3,1,3,3,35,3,2,2,3,2,2,2,1 +4,36,3,3,4454,1,3,4,2,1,4,1,34,3,2,2,3,2,1,2,1 +1,6,4,0,666,4,4,3,2,1,4,1,39,3,2,2,2,2,2,2,1 +2,24,3,0,1965,5,3,4,2,1,4,3,42,3,1,2,3,2,2,2,1 +2,12,4,0,1995,2,2,4,3,1,1,3,27,3,2,1,3,2,1,2,1 
+2,30,2,3,2991,5,5,2,2,1,4,3,25,3,2,1,3,2,1,2,1 +2,30,0,9,4221,1,3,2,2,1,1,3,28,3,2,2,3,2,1,2,1 +1,9,2,3,1364,1,4,3,3,1,4,1,59,3,2,1,3,2,1,2,1 +2,18,4,2,6361,1,5,2,3,1,1,4,41,3,2,1,3,2,2,2,1 +4,27,4,2,4526,4,2,4,3,1,2,1,32,2,2,2,2,1,2,2,1 +2,12,4,3,3573,1,3,1,2,1,1,1,23,3,2,1,2,2,1,2,1 +2,36,3,9,4455,1,3,2,1,1,2,1,30,2,2,2,4,2,2,2,0 +1,9,2,2,2136,1,3,3,3,1,2,1,25,3,2,1,3,2,1,2,1 +2,42,4,9,5954,1,4,2,2,1,1,1,41,1,2,2,2,2,1,2,1 +4,24,4,2,3777,4,3,4,3,1,4,1,40,3,2,1,3,2,2,2,1 +1,15,2,9,806,1,3,4,2,1,4,2,22,3,2,1,2,2,1,2,1 +2,24,3,9,4712,5,3,4,3,1,2,2,34,1,2,2,4,2,2,2,1 +2,36,3,0,7432,1,3,2,2,1,2,2,54,3,1,1,3,2,1,2,1 +4,24,4,3,1851,1,4,4,4,3,2,3,33,3,2,2,3,2,2,2,1 +4,24,2,0,1393,1,3,2,3,3,2,1,31,3,2,1,3,2,2,2,1 +4,12,4,9,1412,1,3,4,2,3,2,1,29,3,2,2,4,2,2,2,1 +4,18,2,3,1473,1,2,3,4,1,4,1,39,3,2,1,3,2,2,2,1 +4,24,2,3,1533,1,2,4,2,1,3,3,38,2,2,1,3,2,2,2,1 +4,12,4,6,2012,5,4,4,2,1,2,3,61,3,2,1,3,2,1,2,1 +1,15,2,0,3959,1,3,3,2,1,2,2,29,3,2,1,3,2,2,2,0 +1,6,2,2,428,1,5,2,2,1,1,2,49,1,2,1,3,2,2,2,1 +2,12,4,0,2366,3,4,3,1,1,3,3,36,3,2,1,4,2,2,2,1 +4,12,2,2,763,1,3,4,2,1,1,1,26,3,2,1,3,2,2,2,1 +2,21,2,2,3976,5,4,2,3,1,3,3,35,3,2,1,3,2,2,2,1 +2,18,2,0,6260,1,4,3,3,1,3,1,28,3,1,1,2,2,1,2,1 +2,9,4,2,1919,1,4,4,3,1,3,3,35,3,1,1,3,2,2,2,1 +4,24,2,1,2603,4,3,2,2,1,4,3,28,3,1,1,3,2,2,2,1 +4,9,4,6,936,3,5,4,3,1,2,3,52,3,2,2,3,2,2,2,1 +4,24,2,2,3062,3,5,4,3,1,3,4,32,3,1,1,3,2,2,2,1 +2,36,2,3,4795,1,2,4,2,1,1,4,30,3,2,1,4,2,2,2,1 +4,36,4,1,5842,1,5,2,3,1,2,2,35,3,2,2,3,1,2,2,1 +2,6,2,3,2063,1,2,4,4,1,3,3,30,3,1,1,4,2,2,2,1 +4,15,4,3,1459,1,3,4,2,1,2,3,43,3,2,1,2,2,1,2,1 +4,15,2,3,1213,3,5,4,3,1,3,2,47,2,2,1,3,2,2,2,1 +4,24,4,3,5103,1,2,3,4,1,3,4,47,3,3,3,3,2,2,2,1 +4,15,2,4,874,5,2,4,2,1,1,1,24,3,2,1,3,2,1,2,1 +4,6,2,2,2978,3,3,1,3,1,2,3,32,3,2,1,3,2,2,2,1 +4,18,2,0,1820,1,3,2,4,1,2,2,30,3,2,1,4,2,2,2,1 +4,24,4,3,2872,2,5,3,3,1,4,1,36,3,2,1,3,1,2,2,1 +3,24,2,2,1925,1,3,2,3,1,2,1,26,3,2,1,3,2,1,2,1 +4,18,2,2,2515,1,3,3,3,1,4,1,43,3,2,1,3,2,2,2,1 
+3,6,2,2,2116,1,3,2,3,1,2,1,41,3,2,1,3,2,2,2,1 +4,18,2,3,1453,1,2,3,2,1,1,1,26,3,2,1,3,2,1,2,1 +4,10,2,0,1364,1,3,2,2,1,4,3,64,3,2,1,3,2,2,2,1 +4,6,2,2,1543,4,3,4,1,1,2,1,33,3,2,1,3,2,1,2,1 +2,12,2,0,1318,4,5,4,3,1,4,1,54,3,2,1,3,2,2,2,1 +1,24,1,0,2325,2,4,2,3,1,3,3,32,1,2,1,3,2,1,2,1 +2,6,4,8,932,5,4,1,2,1,3,2,39,3,2,2,2,2,1,2,1 +3,24,4,3,3148,5,3,3,3,1,2,3,31,3,2,2,3,2,2,2,1 +4,36,2,3,3835,5,5,2,2,1,4,1,45,3,2,1,2,2,2,2,1 +4,9,2,6,3832,5,5,1,3,1,4,1,64,3,2,1,2,2,1,2,1 +2,24,2,3,5084,5,5,2,2,1,4,3,42,3,2,1,3,2,2,2,1 +4,9,4,2,2406,1,1,2,3,1,3,3,31,3,2,1,4,2,1,2,1 +4,36,2,3,2394,5,3,4,2,1,4,3,25,3,2,1,3,2,1,2,1 +4,21,2,1,2476,5,5,4,3,1,4,1,46,3,2,1,4,2,2,2,1 +1,24,2,1,2964,5,5,4,3,1,4,4,49,1,3,1,3,1,2,2,1 +1,12,2,2,1262,5,5,2,1,1,4,2,49,3,2,1,2,2,2,2,1 +4,12,2,9,1542,1,4,2,3,1,4,3,36,3,2,1,3,2,2,2,1 +2,24,4,3,1743,1,5,4,3,1,2,2,48,3,2,2,2,2,1,2,1 +3,12,1,3,409,4,3,3,2,1,3,1,42,3,1,2,3,2,1,2,1 +4,12,2,3,2171,1,2,2,2,1,2,3,29,1,2,1,3,2,1,2,1 +4,48,4,1,8858,5,4,2,3,1,1,4,35,3,3,2,3,2,2,2,1 +2,24,2,0,3512,2,4,2,3,1,3,3,38,1,2,2,3,2,2,2,1 +2,12,2,3,1158,3,3,3,1,1,1,3,26,3,2,1,3,2,2,2,1 +4,24,4,3,2684,1,3,4,3,1,2,1,35,3,2,2,2,2,1,2,1 +1,12,2,3,1498,1,3,4,2,1,1,3,23,1,2,1,3,2,1,2,1 +4,30,4,3,5954,1,4,3,3,2,2,3,38,3,2,1,3,2,1,2,1 +2,48,1,9,6416,1,5,4,2,1,3,4,59,3,1,1,3,2,1,2,0 +2,12,4,2,3617,1,5,1,3,1,4,3,28,3,1,3,3,2,2,2,1 +4,12,4,3,1291,1,3,4,2,1,2,2,35,3,2,2,3,2,1,2,1 +3,24,4,9,1275,4,3,2,1,1,4,1,36,3,2,2,3,2,2,2,1 +4,24,2,2,3972,1,4,2,2,1,4,2,25,3,1,1,3,2,2,2,1 +4,15,4,2,3343,1,3,4,3,1,2,4,28,3,3,1,3,2,2,2,1 +3,15,2,6,392,1,2,4,2,1,4,2,23,3,1,1,3,2,2,2,1 +4,9,4,0,2134,1,3,4,3,1,4,3,48,3,2,3,3,2,2,2,1 +4,30,4,3,5771,1,4,4,2,1,2,3,25,3,2,2,3,2,1,2,1 +4,24,4,9,4526,1,3,3,3,1,2,1,74,3,2,1,4,2,2,2,1 +4,15,4,2,2788,1,4,2,2,2,3,3,24,1,2,2,3,2,1,2,1 +4,6,4,3,1382,1,3,1,2,1,1,3,28,3,2,2,3,2,2,2,1 +3,36,2,3,5848,1,3,4,3,1,1,3,24,3,2,1,3,2,1,2,1 +1,12,2,0,1228,1,3,4,2,1,2,1,24,3,2,1,2,2,1,2,0 +3,12,2,3,1297,1,3,3,4,1,4,1,23,3,1,1,3,2,1,2,1 
+4,24,2,3,1552,1,4,3,3,1,1,3,32,1,2,1,3,1,1,2,1 +4,12,2,3,1963,1,4,4,3,1,2,3,31,3,1,2,4,1,2,2,1 +4,24,2,3,3235,3,5,3,1,1,2,3,36,3,2,1,4,2,2,2,1 +4,24,4,9,4139,2,3,3,3,1,3,2,27,3,2,2,2,2,2,2,1 +2,12,4,1,1804,2,2,3,3,1,4,2,44,3,2,1,3,2,1,2,1 +4,18,2,9,1950,1,4,4,3,1,1,3,34,2,2,2,3,2,2,2,1 +4,48,3,3,12749,3,4,4,3,1,1,3,37,3,2,1,4,2,2,2,1 +4,9,2,4,1236,1,2,1,2,1,4,1,23,3,1,1,3,2,2,2,1 +4,18,4,0,1055,1,2,4,2,1,1,2,30,3,2,2,3,2,1,2,1 +1,30,0,9,8072,5,2,2,3,1,3,3,25,1,2,3,3,2,1,2,1 +4,30,4,3,2831,1,3,4,2,1,2,3,33,3,2,1,3,2,2,2,1 +4,9,2,9,1449,1,4,3,2,1,2,3,27,3,2,2,3,2,1,2,1 +4,36,2,9,5742,2,4,2,3,1,2,3,31,3,2,2,3,2,2,2,1 +4,12,2,0,2390,5,5,4,3,1,3,3,50,3,2,1,3,2,2,2,1 +4,24,2,3,3430,3,5,3,3,1,2,3,31,3,2,1,3,1,2,2,1 +2,36,2,6,2273,1,4,3,3,1,1,3,32,3,2,2,3,1,1,2,1 +3,21,2,0,2923,2,3,1,2,1,1,3,28,1,2,1,4,2,2,2,1 +4,24,2,3,1901,2,3,4,3,1,4,3,29,3,1,1,4,2,2,2,1 +2,36,2,6,3711,5,3,2,4,1,2,3,27,3,2,1,3,2,1,2,1 +2,48,2,0,8487,5,4,1,2,1,2,3,24,3,2,1,3,2,1,2,1 +4,24,2,0,2255,5,2,4,3,1,1,2,54,3,2,1,3,2,1,2,1 +4,12,2,3,1262,1,3,3,3,1,2,3,25,3,2,1,3,2,1,2,1 +4,33,4,1,7253,1,4,3,3,1,2,3,35,3,2,2,4,2,2,2,1 +4,6,4,0,6761,1,4,1,3,1,3,4,45,3,2,2,4,1,2,2,1 +4,18,4,2,1817,1,3,4,2,1,2,4,28,3,2,2,3,2,1,2,1 +4,12,2,3,2141,2,4,3,3,1,1,4,35,3,2,1,3,2,1,2,1 +4,48,1,9,3609,1,3,1,2,1,1,1,27,2,2,1,3,2,1,2,1 +4,30,2,3,2333,3,5,4,3,1,2,3,30,1,2,1,4,2,1,2,1 +4,28,1,1,7824,5,2,3,3,3,4,1,40,1,1,2,3,1,2,2,1 +3,18,1,3,1445,5,4,4,3,1,4,3,49,1,2,1,2,2,1,2,1 +1,24,2,2,7721,5,2,1,2,1,2,2,30,3,2,1,3,2,2,1,1 +1,21,2,0,3763,5,4,2,3,2,2,1,24,3,2,1,2,2,1,1,1 +2,18,2,9,4439,1,5,1,3,2,1,1,33,1,2,1,4,2,2,2,1 +1,12,2,3,1107,1,3,2,3,1,2,1,20,3,1,1,4,1,2,2,1 +2,15,2,3,1444,5,2,4,3,1,1,2,23,3,2,1,3,2,1,2,1 +2,48,1,0,12169,5,1,4,3,2,4,4,36,3,3,1,4,2,2,2,1 +4,9,2,3,2753,2,5,3,3,2,4,3,35,3,2,1,3,2,2,2,1 +3,4,2,0,1494,5,2,1,3,1,2,1,29,3,2,1,2,1,1,1,1 +1,24,1,2,2828,3,3,4,3,1,4,1,22,2,2,1,3,2,2,2,1 +1,24,1,2,2483,3,3,4,3,1,4,1,22,2,2,1,3,2,2,2,1 +3,6,4,0,1299,1,3,1,3,1,1,1,74,3,2,3,1,1,1,1,1 
+2,9,2,0,1549,5,2,4,3,1,2,1,35,3,2,1,1,2,1,2,1 +3,10,2,0,3949,1,2,1,3,3,1,2,37,3,2,1,2,1,1,2,1 +4,10,2,1,2901,5,2,1,2,1,4,1,31,3,1,1,3,2,1,2,1 +3,6,2,0,709,4,2,2,4,1,2,1,27,3,2,1,1,2,1,1,1 +1,47,2,0,10722,1,2,1,2,1,1,1,35,3,2,1,2,2,2,2,1 +4,10,2,0,1287,5,5,4,3,2,2,2,45,3,2,1,2,2,1,1,1 +1,18,1,3,1940,1,2,3,3,2,4,4,36,1,3,1,4,2,2,2,1 +3,30,4,3,3656,5,5,4,3,1,4,2,49,2,2,2,2,2,1,2,1 +4,24,3,1,4679,1,4,3,3,1,3,3,35,3,2,2,2,2,2,2,1 +4,27,3,1,8613,4,3,2,3,1,2,3,27,3,2,2,3,2,1,2,1 +1,18,2,2,2659,4,3,4,3,1,2,3,28,3,2,1,3,2,1,2,1 +4,24,4,3,1516,4,3,4,2,1,1,1,43,3,2,2,2,2,1,2,1 +1,18,2,0,4380,2,3,3,3,1,4,3,35,3,2,1,2,1,2,2,1 +4,14,3,0,802,1,3,4,3,1,2,3,27,3,2,2,2,2,1,2,1 +4,21,2,9,1572,4,5,4,2,1,4,1,36,1,2,1,2,2,1,2,1 +2,48,1,9,3566,2,4,4,3,1,2,3,30,3,2,1,3,2,1,2,1 +4,24,2,3,1278,1,5,4,3,1,1,1,36,3,2,1,4,2,2,2,1 +4,6,0,3,426,1,5,4,4,1,4,3,39,3,2,1,2,2,1,2,1 +4,39,2,1,8588,2,5,4,3,1,2,3,45,3,2,1,4,2,2,2,1 +1,30,2,1,3857,1,3,4,1,1,4,2,40,3,2,1,4,2,2,2,1 +2,12,2,0,685,1,4,2,4,1,3,3,25,1,2,1,2,2,1,2,0 +1,24,2,3,1603,1,5,4,2,1,4,3,55,3,2,1,3,2,1,2,1 +4,21,2,2,2241,1,5,4,3,1,2,1,50,3,2,2,3,2,1,2,1 +1,24,2,3,2384,1,5,4,3,1,4,1,64,1,1,1,2,2,1,2,1 +4,4,2,2,601,1,2,1,2,1,3,1,23,3,1,1,2,1,1,2,1 +4,39,2,1,2569,3,3,4,3,1,4,3,24,3,2,1,3,2,1,2,1 +4,15,4,3,1316,3,3,2,4,1,2,2,47,3,2,2,2,2,1,2,1 +4,60,2,0,10366,1,5,2,3,1,4,2,42,3,2,1,4,2,2,2,1 +4,18,2,9,1568,2,3,3,2,1,4,2,24,3,1,1,2,2,1,2,1 +4,18,4,3,629,3,5,4,3,1,3,2,32,1,2,2,4,2,2,2,1 +4,6,1,3,1750,3,5,2,3,1,4,2,45,1,2,1,2,1,1,2,1 +4,24,2,1,3488,2,4,3,2,1,4,3,23,3,2,1,3,2,1,2,1 +4,18,4,3,1800,1,3,4,3,1,2,3,24,3,2,2,3,2,1,2,1 +4,24,3,2,4151,2,3,2,3,1,3,2,35,3,2,2,3,2,1,2,1 +2,15,2,5,2631,2,3,3,2,1,2,1,25,3,2,1,2,2,1,2,1 +4,21,2,1,5248,5,3,1,3,1,3,3,26,3,2,1,3,2,1,2,1 +2,18,3,0,2899,5,5,4,3,1,4,3,43,3,2,1,3,1,1,2,1 +2,18,3,5,6204,1,3,2,3,1,4,1,44,3,2,1,2,1,2,2,1 +4,12,2,3,804,1,5,4,3,1,4,3,38,3,2,1,3,2,1,2,1 +4,36,2,3,3595,1,5,4,3,1,2,3,28,3,2,1,3,2,1,2,1 +4,36,4,1,5711,4,5,4,3,1,2,3,38,3,2,2,4,2,2,2,1 
+3,15,2,9,2687,1,4,2,3,1,4,2,26,3,1,1,3,2,2,2,1 +1,15,3,2,3643,1,5,1,2,1,4,2,27,3,2,2,2,2,1,2,1 +4,10,4,2,2146,1,2,1,2,1,3,1,23,3,1,2,3,2,1,2,1 +1,10,2,3,2315,1,5,3,3,1,4,1,52,3,2,1,2,2,1,2,1 +4,5,2,9,3448,1,4,1,3,1,4,1,74,3,2,1,2,2,1,2,1 +4,15,2,2,2708,1,2,2,3,1,3,2,27,1,2,2,2,2,1,2,1 +4,11,4,0,1393,1,2,4,2,1,4,3,35,3,2,2,4,2,1,2,1 +3,10,2,2,1275,1,2,4,2,1,2,2,23,3,2,1,3,2,1,2,1 +4,9,2,2,1313,1,5,1,3,1,4,3,20,3,2,1,3,2,1,2,1 +4,12,2,3,1493,1,2,4,2,1,3,3,34,3,2,1,3,1,1,2,1 +4,22,2,3,2675,3,5,3,3,1,4,3,40,3,2,1,3,2,1,2,1 +2,9,2,3,2118,1,3,2,3,1,2,1,37,3,2,1,2,1,1,2,1 +4,36,2,0,909,3,5,4,3,1,4,2,36,3,2,1,3,2,1,2,1 +4,12,4,2,1258,1,2,2,2,1,4,2,22,3,1,2,2,2,1,2,1 +4,15,1,3,1569,2,5,4,3,1,4,3,34,1,2,1,2,1,1,2,1 +4,6,2,1,1236,3,3,2,3,1,4,2,50,3,1,1,3,2,1,2,1 +4,36,3,2,7678,3,4,2,2,1,4,3,40,3,2,2,3,2,2,2,1 +4,6,2,5,660,3,4,2,4,1,4,1,23,3,1,1,2,2,1,2,1 +4,24,2,2,2835,3,5,3,3,1,4,2,53,3,2,1,3,2,1,2,1 +4,24,2,1,2670,1,5,4,3,1,4,3,35,3,2,1,4,2,2,2,1 +4,12,1,8,3447,3,3,4,2,1,3,1,35,3,2,1,2,1,1,2,1 +4,15,2,3,3568,1,5,4,2,1,2,3,54,1,1,1,4,2,2,2,1 +2,21,4,9,3652,1,4,2,3,1,3,2,27,3,2,2,3,2,1,2,1 +1,24,2,3,3660,1,3,2,2,1,4,3,28,3,2,1,3,2,1,2,1 +3,9,2,3,1126,2,5,2,1,1,4,1,49,3,2,1,3,2,1,2,1 +3,6,3,3,683,1,2,2,2,1,1,2,29,1,2,1,3,2,1,2,1 +3,12,2,2,2251,1,3,1,2,1,2,3,46,3,2,1,2,2,1,2,1 +4,12,2,1,4675,5,2,1,2,1,4,3,20,3,1,1,3,2,1,2,1 +2,21,3,0,2353,1,3,1,1,1,4,2,47,3,2,2,3,2,1,2,1 +1,21,2,3,3357,4,2,4,2,1,2,3,29,1,2,1,3,2,1,2,1 +4,6,2,0,672,1,1,1,2,1,4,1,54,3,2,1,1,2,2,2,1 +1,6,4,3,338,3,5,4,3,1,4,3,52,3,2,2,3,2,1,2,1 +4,9,2,3,2697,1,3,1,3,1,2,1,32,3,2,1,3,1,1,2,1 +4,9,2,0,2507,3,5,2,3,1,4,4,51,3,3,1,2,2,1,2,1 +4,15,3,3,1478,1,3,4,4,1,3,1,33,1,2,2,3,2,1,2,1 +4,12,4,6,3565,5,2,2,3,1,1,2,37,3,2,2,2,1,1,2,1 +4,15,2,2,2221,3,3,2,2,1,4,3,20,3,1,1,3,2,1,2,1 +4,6,4,3,1898,5,3,1,3,1,2,1,34,3,2,2,2,1,1,2,1 +2,6,3,9,1449,2,5,1,1,1,2,3,31,1,2,2,3,1,1,2,1 +4,15,3,2,960,4,4,3,2,1,2,2,30,3,2,2,3,2,1,2,1 +4,36,2,1,8133,1,3,1,2,1,2,2,30,1,2,1,3,2,1,2,1 +4,9,2,2,2301,2,2,2,2,1,4,2,22,3,1,1,3,2,1,2,1 
+4,6,3,9,1743,2,3,1,3,1,2,1,34,3,2,2,2,2,1,2,1 +2,12,2,2,983,4,2,1,2,1,4,1,19,3,1,1,2,2,1,2,1 +4,18,3,3,2320,1,1,2,4,1,3,1,34,3,2,2,3,2,1,2,1 +1,12,1,8,339,1,5,4,4,1,1,3,45,1,2,1,2,2,1,2,1 +3,24,2,3,5152,1,4,4,3,1,2,3,25,1,2,1,3,2,1,2,1 +3,24,2,2,3749,1,2,2,2,1,4,3,26,3,2,1,3,2,1,2,1 +4,9,4,3,3074,5,3,1,3,1,2,1,33,3,2,2,3,1,1,2,1 +3,9,2,3,745,1,3,3,2,1,2,1,28,3,2,1,2,2,1,2,0 +4,24,2,0,1469,2,5,4,4,1,4,1,41,3,1,1,2,2,1,2,1 +1,6,2,2,1374,1,3,1,3,1,2,1,36,1,2,1,2,2,2,2,1 +4,6,1,0,783,5,3,1,3,3,2,1,26,2,2,1,2,1,1,2,1 +1,21,2,3,2606,1,2,4,2,1,4,2,28,3,1,1,4,2,2,2,1 +4,54,0,1,9436,5,3,2,3,1,2,2,39,3,2,1,2,1,1,2,1 +4,12,4,3,930,5,5,4,3,1,4,1,65,3,2,4,3,2,1,2,1 +4,48,4,1,2751,5,5,4,3,1,3,3,38,3,2,2,3,1,2,2,1 +4,6,4,0,250,4,3,2,2,1,2,1,41,1,2,2,2,2,1,2,1 +2,24,2,0,1201,1,2,4,3,1,1,2,26,3,2,1,3,2,1,2,1 +1,6,2,0,662,1,2,3,3,1,4,1,41,3,2,1,2,1,2,2,1 +4,15,2,1,1300,5,5,4,3,1,4,4,45,1,3,1,3,1,1,2,1 +4,24,1,9,1559,1,4,4,3,1,4,3,40,1,2,1,3,2,2,2,1 +3,12,2,3,3016,1,3,3,4,1,1,3,24,3,2,1,3,2,1,2,1 +4,15,4,3,1360,1,3,4,3,1,2,2,31,3,2,2,3,2,1,2,1 +4,6,0,0,1204,2,3,4,3,1,1,4,35,1,1,1,3,2,1,1,1 +4,10,2,0,1597,3,3,3,3,1,2,4,40,3,1,1,2,1,1,1,1 +4,12,2,3,2073,2,3,4,2,2,2,1,28,3,2,1,3,2,1,2,1 +4,11,2,9,2142,4,5,1,1,1,2,1,28,3,2,1,3,2,2,2,1 +1,10,4,2,2132,5,2,2,2,2,3,1,27,3,1,2,3,2,1,1,1 +4,10,2,0,1546,1,3,3,3,1,2,1,31,3,2,1,2,1,1,1,1 +4,24,4,0,1287,4,5,4,2,1,4,1,37,3,2,2,3,2,2,2,1 +4,10,2,0,1418,2,3,3,3,1,2,1,35,3,1,1,2,2,1,1,1 +3,6,4,0,1343,1,5,1,3,1,4,1,46,3,2,2,3,1,1,1,1 +4,18,2,0,2662,5,4,4,3,1,3,2,32,3,2,1,3,2,1,1,1 +4,18,4,3,6070,1,5,3,3,1,4,3,33,3,2,2,3,2,2,2,1 +4,24,4,6,1927,5,3,3,2,1,2,3,33,3,2,2,3,2,2,2,1 +4,18,4,3,2404,1,3,2,2,1,2,3,26,3,2,2,3,2,1,2,1 +4,6,4,3,1554,1,4,1,2,1,2,3,24,3,1,2,3,2,2,2,1 +4,22,2,0,1283,5,4,4,2,1,4,2,25,3,1,1,3,2,1,2,1 +4,24,3,0,717,5,5,4,4,1,4,3,54,3,2,2,3,2,2,2,1 +1,24,2,2,1747,1,2,4,3,2,1,2,24,3,2,1,2,2,1,1,1 +1,9,4,5,1288,2,5,3,3,3,4,1,48,3,2,2,3,1,1,1,1 +1,10,4,0,1038,1,4,4,3,2,3,2,49,3,2,2,3,2,2,2,1 +4,10,2,1,2848,2,3,1,3,2,2,1,32,3,2,1,3,1,1,2,1 
+4,12,2,1,1413,4,4,3,3,1,2,2,55,3,2,1,3,2,1,1,1 +4,30,4,3,3077,5,5,3,3,1,2,3,40,3,2,2,3,1,2,2,1 +1,24,1,1,3632,1,3,1,2,3,4,3,22,1,1,1,3,2,1,1,1 +4,18,4,1,3229,5,1,2,3,1,4,4,38,3,2,1,4,2,2,2,1 +4,9,2,0,3577,2,3,1,3,3,2,1,26,3,1,1,3,1,1,1,1 +4,12,4,0,682,2,4,4,2,1,3,3,51,3,2,2,3,2,2,2,1 +4,10,2,3,1924,1,3,1,3,1,4,2,38,3,2,1,3,2,2,1,1 +4,10,2,6,727,3,5,4,3,1,4,4,46,3,3,1,3,2,2,2,1 +3,10,4,0,781,1,5,4,3,1,4,4,63,3,3,2,3,2,2,2,1 +1,12,4,0,2121,1,3,4,3,1,2,2,30,3,2,2,3,2,1,2,1 +4,12,4,6,701,1,3,4,3,1,2,3,32,3,2,2,3,2,1,2,1 +4,10,4,2,2069,5,3,2,4,1,1,3,26,3,2,2,3,2,1,1,1 +4,24,2,0,1525,4,4,4,2,1,3,3,34,3,2,1,3,1,2,2,1 +4,48,4,9,7629,5,5,4,1,1,2,3,46,1,2,2,4,1,1,2,1 +1,12,4,0,3499,1,3,3,2,2,2,1,29,3,2,2,3,2,1,2,0 +4,6,2,3,1346,2,5,2,3,1,4,4,42,1,3,1,3,1,2,2,1 +4,36,4,1,10477,5,5,2,3,1,4,4,42,3,3,2,3,2,1,2,1 +1,24,2,1,2924,1,3,3,3,3,4,4,63,1,2,1,3,1,2,2,1 +4,10,4,0,1231,1,5,3,3,1,4,1,32,3,2,2,2,1,1,1,1 +3,18,2,0,1961,1,5,3,2,1,2,3,23,3,2,1,4,2,1,2,1 +4,15,4,0,5045,5,5,1,2,1,4,3,59,3,2,1,3,2,2,2,1 +4,12,4,0,1255,1,5,4,3,1,4,1,61,3,2,2,2,2,1,2,1 +1,12,2,2,1858,1,2,4,2,1,1,3,22,3,1,1,3,2,1,2,1 +4,6,4,2,1221,5,3,1,4,1,2,2,27,3,2,2,3,2,1,2,1 +4,9,2,2,1388,1,3,4,2,1,2,1,26,3,1,1,3,2,1,2,1 +4,12,2,3,2279,5,3,4,3,1,4,4,37,3,3,1,3,2,2,2,1 +4,12,0,2,2759,1,5,2,3,1,4,2,34,3,2,2,3,2,1,2,1 +3,24,2,3,1258,3,3,3,2,1,3,3,57,3,2,1,2,2,1,2,1 +2,12,0,8,1410,1,3,2,3,1,2,1,31,3,2,1,2,2,2,2,1 +1,15,2,0,1403,1,3,2,2,1,4,3,28,3,1,1,3,2,1,2,1 +1,24,2,2,3021,1,3,2,1,1,2,1,24,3,1,1,2,2,1,2,1 +1,24,2,9,6568,1,3,2,4,1,2,3,21,2,2,1,2,2,1,2,1 +4,24,4,3,2578,4,5,2,3,1,2,3,34,3,2,1,3,2,1,2,1 +2,24,4,1,7758,4,5,2,2,1,4,4,29,3,1,1,3,2,1,2,1 +1,6,2,4,343,1,2,4,2,1,1,1,27,3,2,1,3,2,1,2,1 +4,21,3,2,1591,2,4,4,3,1,3,1,34,3,2,2,4,2,1,2,1 +1,27,2,3,3416,1,3,3,3,1,2,3,27,3,2,1,4,2,1,2,1 +1,12,0,5,1108,1,4,4,3,1,3,1,28,3,2,2,3,2,1,2,0 +2,27,3,1,5965,1,5,1,3,1,2,3,30,3,2,2,4,2,2,2,1 +2,15,2,5,1514,2,3,4,3,3,2,1,22,3,2,1,3,2,1,2,1 +4,30,4,3,6742,5,4,2,3,1,3,2,36,3,2,2,3,2,1,2,1 
+1,18,2,2,3650,1,2,1,2,1,4,3,22,3,1,1,3,2,1,2,1 +1,21,2,2,3599,1,4,1,2,1,4,3,26,3,1,1,2,2,1,2,1 +4,60,4,0,13756,5,5,2,3,1,4,4,63,1,3,1,4,2,2,2,1 +2,9,2,0,276,1,3,4,4,1,4,1,22,3,1,1,2,2,1,2,1 +4,42,4,2,4041,3,3,4,3,1,4,1,36,3,2,2,3,2,2,2,1 +2,9,2,3,458,1,3,4,3,1,3,1,24,3,2,1,3,2,1,2,1 +2,9,2,2,918,1,3,4,2,1,1,2,30,3,2,1,3,2,1,2,0 +4,24,2,0,7393,1,3,1,3,1,4,2,43,3,2,1,2,1,1,2,1 +3,10,2,4,1225,1,3,2,3,1,2,3,37,3,2,1,3,2,2,2,1 +1,24,2,1,2812,5,5,2,2,1,4,1,26,3,1,1,3,2,1,2,1 +4,15,2,1,3029,1,4,2,3,1,2,3,33,3,2,1,3,2,1,2,1 +3,12,4,0,1480,3,1,2,3,1,4,4,66,1,3,3,1,2,1,2,1 +3,6,4,6,1047,1,3,2,2,1,4,2,50,3,2,1,2,2,1,2,1 +4,15,4,3,1471,1,3,4,3,1,4,4,35,3,3,2,3,2,2,2,1 +4,24,2,2,5511,2,3,4,3,1,1,3,25,2,2,1,3,2,1,2,1 +2,9,2,3,1206,1,5,4,2,1,4,1,25,3,2,1,3,2,1,2,1 +2,24,3,3,6403,1,2,1,3,1,2,3,33,3,2,1,3,2,1,2,1 +4,12,2,3,707,1,3,4,3,1,2,1,30,1,2,2,3,2,1,2,1 +4,12,3,1,1503,1,3,4,4,1,4,1,41,3,1,1,3,2,1,2,1 +2,12,2,0,6078,1,4,2,3,1,2,3,32,3,2,1,3,2,1,2,1 +2,27,2,9,2528,1,2,4,2,1,1,2,32,3,2,1,3,1,2,2,1 +2,12,2,9,1037,2,4,3,3,1,4,1,39,3,2,1,2,2,1,2,1 +1,6,2,1,1352,3,1,1,2,1,2,2,23,3,1,1,1,2,2,2,1 +4,24,2,3,3181,1,2,4,2,1,4,2,26,3,2,1,3,2,2,2,1 +4,18,2,3,4594,1,2,3,3,1,2,3,32,3,2,1,3,2,2,2,1 +2,48,2,10,5381,5,1,3,3,1,4,4,40,1,3,1,1,2,2,2,1 +4,15,2,1,4657,1,3,3,3,1,2,3,30,3,2,1,3,2,2,2,1 +2,9,2,9,1391,1,3,2,4,1,1,1,27,1,2,1,3,2,2,2,1 +2,18,2,9,1913,4,2,3,4,1,3,1,36,1,2,1,3,2,2,2,1 +4,42,2,3,7166,5,4,2,4,1,4,2,29,3,1,1,3,2,2,2,1 +4,13,2,3,1409,2,1,2,2,1,4,1,64,3,2,1,3,2,1,2,1 +4,24,3,9,2978,5,3,4,3,1,4,1,32,3,2,2,3,1,2,2,1 +4,12,4,3,976,5,5,4,3,1,4,3,35,3,2,2,3,2,1,2,1 +4,24,3,9,2375,3,3,4,3,1,2,3,44,3,2,2,3,1,2,2,1 +4,12,4,3,522,3,5,4,3,1,4,2,42,3,2,2,3,1,2,2,1 +4,28,4,3,2743,1,5,4,3,1,2,3,29,3,2,2,3,2,1,2,1 +4,11,4,3,1154,2,1,4,2,1,4,1,57,3,2,3,2,2,1,2,1 +4,24,4,1,5804,4,3,4,3,1,2,1,27,3,2,2,3,2,1,2,1 +4,18,4,3,1169,5,3,4,3,1,3,2,29,3,2,2,3,2,2,2,1 +1,15,4,2,1478,1,5,4,3,1,4,3,44,3,2,2,3,1,2,2,1 +4,12,2,3,776,1,3,4,4,1,2,1,28,3,2,1,3,2,1,2,1 
+2,11,4,0,1322,4,3,4,2,1,4,3,40,3,2,2,3,2,1,2,1 +2,16,4,0,1175,1,1,2,3,1,3,3,68,3,3,3,1,2,2,2,1 +4,12,2,0,2133,5,5,4,2,1,4,4,52,3,3,1,4,2,2,2,1 +4,15,4,3,1829,1,5,4,3,1,4,3,46,3,2,2,3,2,2,2,1 +4,12,4,3,717,1,5,4,3,1,4,1,52,3,2,3,3,2,1,2,1 +2,39,3,6,11760,2,4,2,3,1,3,4,32,3,1,1,3,2,2,2,1 +2,9,4,6,1501,1,5,2,2,1,3,3,34,3,2,2,4,2,2,2,0 +1,12,2,6,1200,5,3,4,2,1,4,2,23,1,1,1,3,2,2,2,1 +2,9,2,0,3195,5,3,1,2,1,2,1,33,3,2,1,2,2,1,2,1 +4,30,4,3,4530,1,4,4,2,1,4,3,26,3,1,1,4,2,2,2,1 +4,12,3,5,1555,4,5,4,3,1,4,4,55,3,3,2,3,1,1,2,0 +2,15,4,9,2326,3,3,2,3,1,4,3,27,1,2,1,3,2,1,2,1 +2,18,4,9,1887,5,3,4,4,1,4,1,28,1,2,2,3,2,1,2,1 +4,12,4,9,1264,5,5,4,3,1,4,4,57,3,1,1,2,2,1,2,1 +4,7,3,3,846,5,5,3,3,1,4,4,36,3,3,1,3,2,1,2,1 +4,15,4,6,1532,2,3,4,2,1,3,3,31,3,2,1,3,2,1,2,1 +4,6,3,3,935,1,3,3,2,1,2,1,24,3,2,1,3,2,1,2,1 +1,27,4,9,2442,1,5,4,3,1,4,3,43,2,2,4,4,1,2,2,1 +2,18,4,9,3590,1,1,3,4,1,3,3,40,3,2,3,1,1,2,2,1 +4,21,4,2,2288,1,2,4,2,1,4,2,23,3,2,1,3,2,2,2,1 +4,27,3,9,5117,1,4,3,3,1,4,3,26,3,2,2,3,2,1,2,1 +1,39,4,2,14179,5,4,4,3,1,4,2,30,3,2,2,4,2,2,2,1 +4,15,2,3,1386,5,3,4,4,1,2,1,40,3,1,1,3,2,2,2,1 +4,12,4,3,618,1,5,4,3,1,4,1,56,3,2,1,3,2,1,2,1 +4,12,2,2,1574,1,3,4,3,1,2,1,50,3,2,1,3,2,1,2,1 +4,6,4,3,700,5,5,4,3,1,4,4,36,3,3,2,3,2,1,2,1 +4,12,2,3,886,5,3,4,2,1,2,3,21,3,2,1,3,2,1,2,1 +4,36,2,1,4686,1,3,2,3,1,2,4,32,3,3,1,4,2,2,2,1 +2,9,2,3,790,3,3,4,2,1,3,1,66,3,2,1,2,2,1,2,1 +2,12,2,3,766,3,3,4,3,1,3,1,66,3,2,1,2,2,1,2,0 +1,20,2,2,2212,5,4,4,3,1,4,3,39,3,2,1,3,2,2,2,1 +2,10,2,0,7308,1,1,2,3,1,4,4,70,1,3,1,4,2,2,2,1 +2,24,4,6,5743,1,2,2,2,1,4,4,24,3,3,2,3,2,2,2,1 +1,14,2,0,3973,1,1,1,3,1,4,4,22,3,3,1,3,2,1,2,1 +2,60,3,3,7418,5,3,1,3,1,1,1,27,3,2,1,2,2,1,2,1 +2,20,3,10,2629,1,3,2,3,1,3,3,29,1,2,2,3,2,2,2,1 +2,18,2,9,1941,4,3,4,3,1,2,2,35,3,2,1,2,2,2,2,1 +2,24,3,2,2333,5,2,4,3,1,2,2,29,1,2,1,2,2,1,2,1 +4,12,2,1,2445,5,2,2,4,1,4,3,26,3,1,1,3,2,2,2,1 +2,20,2,1,6468,5,1,1,1,1,4,1,60,3,2,1,4,2,2,2,1 +2,18,4,2,7374,1,1,4,3,1,4,2,40,2,2,2,4,2,2,2,1 
+4,15,2,1,3812,2,2,1,2,1,4,3,23,3,2,1,3,2,2,2,1 +1,28,2,0,4006,1,3,3,3,1,2,3,45,3,2,1,2,2,1,2,0 +2,12,2,0,7472,5,1,1,2,1,2,1,24,3,1,1,1,2,1,2,1 +3,12,2,2,1424,5,5,3,2,1,4,1,55,3,2,1,4,2,2,2,1 +2,12,2,1,2028,5,3,4,3,1,2,3,30,3,2,1,3,2,1,2,1 +4,15,2,0,5324,3,5,1,2,1,4,4,35,3,3,1,3,2,1,2,1 +2,36,2,3,2323,1,4,4,3,1,4,3,24,3,1,1,3,2,1,2,1 +4,12,2,6,1393,1,5,4,3,1,4,2,47,1,2,3,3,1,2,2,1 +4,18,2,2,1984,1,3,4,3,1,4,4,47,1,3,2,3,2,1,2,1 +4,24,2,3,999,5,5,4,3,1,2,3,25,3,2,2,3,2,1,2,1 +4,36,2,9,7409,5,5,3,3,1,2,2,37,3,2,2,3,2,1,2,1 +4,15,2,2,2186,5,4,1,2,1,4,1,33,1,1,1,2,2,1,2,1 +3,36,2,3,4473,1,5,4,3,1,2,3,31,3,2,1,3,2,1,2,1 +4,24,2,8,937,1,2,4,4,1,3,3,27,3,2,2,2,2,1,2,1 +4,18,2,2,3422,1,5,4,3,1,4,2,47,1,2,3,3,1,2,2,1 +4,24,2,3,3105,5,2,4,3,1,2,3,25,3,2,2,3,2,1,2,1 +4,12,4,6,2748,1,5,2,2,1,4,4,57,1,3,3,2,2,1,2,1 +2,18,2,5,3872,1,1,2,2,1,4,3,67,3,2,1,3,2,2,2,1 +4,27,2,5,5190,5,5,4,3,1,4,2,48,3,2,4,3,1,2,2,1 +2,18,2,2,3001,1,4,2,2,1,4,1,40,3,1,1,3,2,1,2,1 +4,24,3,9,3863,1,3,1,3,1,2,4,32,3,3,1,3,2,1,2,1 +4,12,4,2,5801,5,5,2,3,1,4,2,49,3,1,1,3,2,2,1,1 +4,12,4,2,1592,4,4,3,2,1,2,2,35,3,2,1,3,2,1,2,1 +4,12,4,9,1185,1,3,3,2,1,2,1,27,3,2,2,3,2,1,2,1 +4,18,4,2,3780,1,2,3,1,1,2,3,35,3,2,2,4,2,2,2,1 +2,18,4,2,3612,1,5,3,2,1,4,2,37,3,2,1,3,2,2,1,1 +4,12,2,9,1076,1,3,2,4,1,2,1,26,3,2,1,3,2,2,2,1 +4,12,2,0,3527,5,2,2,3,1,3,2,45,3,2,1,4,1,2,2,1 +4,18,2,3,2051,1,2,4,3,1,1,1,33,3,2,1,3,2,1,2,1 +4,12,4,2,3331,1,5,2,3,1,4,2,42,2,2,1,3,2,1,2,1 +1,18,0,9,3104,1,4,3,3,1,1,2,31,1,2,1,3,2,2,2,1 +4,24,4,3,2611,1,5,4,4,2,3,1,46,3,2,2,3,2,1,2,1 +1,12,4,1,1409,1,5,4,3,1,3,1,54,3,2,1,3,2,1,2,1 +4,24,2,3,1311,2,4,4,4,1,3,2,26,3,2,1,3,2,2,2,1 +4,6,2,3,2108,1,4,2,4,1,2,1,29,3,1,1,3,2,1,2,1 +4,24,4,1,4042,5,4,3,3,1,4,2,43,3,2,2,3,2,2,2,1 +4,12,4,0,926,1,1,1,2,1,2,2,38,3,2,1,1,2,1,2,1 +1,12,2,3,1680,3,5,3,4,1,1,1,35,3,2,1,3,2,1,2,1 +4,24,2,0,1249,1,2,4,4,1,2,1,28,3,2,1,3,2,1,2,1 +4,24,4,0,2463,2,4,4,4,1,3,2,27,3,2,2,3,2,2,2,1 +4,6,2,3,1595,1,4,3,3,1,2,2,51,3,2,1,3,1,1,2,1 
+4,24,4,5,2058,1,3,4,1,1,2,1,33,3,2,2,3,2,2,2,1 +4,24,2,1,7814,1,4,3,3,1,3,3,38,3,2,1,4,2,2,2,1 +4,6,4,3,1740,1,5,2,4,1,2,1,30,3,1,2,3,2,1,2,1 +4,12,4,3,1240,5,5,4,2,1,2,1,38,3,2,2,3,2,2,2,1 +4,24,4,1,6842,5,3,2,3,1,4,2,46,3,2,2,4,1,2,2,1 +4,24,4,2,5150,1,5,4,3,1,4,3,33,3,2,1,3,2,2,2,1 +1,6,2,0,1203,2,5,3,3,1,2,2,43,3,2,1,3,2,2,2,1 +4,6,4,0,2080,3,3,1,4,1,2,3,24,3,2,1,3,2,1,2,1 +4,6,2,6,1538,1,2,1,2,1,2,4,56,3,2,1,3,2,1,2,1 +2,24,4,0,3878,2,2,4,1,1,2,3,37,3,2,1,3,2,2,2,1 +2,30,2,2,3832,1,2,2,4,1,1,2,22,3,2,1,3,2,1,2,1 +4,15,2,0,3186,4,4,2,2,1,3,3,20,3,1,1,3,2,1,2,1 +2,24,2,3,2896,2,2,2,3,1,1,3,29,3,2,1,3,2,1,2,1 +2,24,3,9,6967,2,4,4,3,1,4,3,36,3,1,1,4,2,2,2,1 +4,36,2,6,1819,1,3,4,3,1,4,4,37,2,3,1,3,2,2,2,0 +4,24,2,3,5943,5,2,1,2,1,1,3,44,3,2,2,3,2,2,2,0 +4,36,4,2,7127,1,2,2,2,1,4,2,23,3,1,2,3,2,2,2,0 +4,36,2,2,3349,1,3,4,2,1,2,3,28,3,2,1,4,2,2,2,0 +4,36,2,2,10974,1,1,4,2,1,2,3,26,3,2,2,4,2,2,2,0 +4,6,2,3,518,1,3,3,2,1,1,1,29,3,2,1,3,2,1,2,1 +4,18,2,3,1126,5,2,4,2,1,2,1,21,3,1,1,3,2,2,2,1 +2,12,4,1,1860,1,1,4,3,1,2,3,34,3,2,2,4,2,2,2,1 +4,36,4,3,9566,1,3,2,2,1,2,3,31,2,2,2,3,2,1,2,1 +1,12,2,3,701,1,3,4,4,1,2,1,40,3,2,1,2,2,1,2,1 +2,12,2,3,2930,1,4,2,2,1,1,1,27,3,2,1,3,2,1,2,1 +4,18,2,3,1505,1,3,4,3,1,2,4,32,3,3,1,4,2,2,2,1 +4,18,4,3,2238,1,3,2,2,1,1,3,25,3,2,2,3,2,1,2,1 +4,4,4,3,1503,1,4,2,3,1,1,1,42,3,2,2,2,1,1,2,1 +4,24,4,1,2197,5,4,4,3,1,4,3,43,3,2,2,3,1,2,2,1 +3,12,2,3,1881,1,3,2,2,1,2,3,44,3,1,1,2,2,2,2,1 +1,18,4,3,1880,1,4,4,4,1,1,2,32,3,2,2,4,2,2,2,1 +1,18,2,3,2389,1,2,4,2,1,1,3,27,2,2,1,3,2,1,2,1 +2,24,2,3,1967,1,5,4,2,1,4,3,20,3,2,1,3,2,2,2,1 +4,4,4,0,3380,1,4,1,2,1,1,1,37,3,2,1,3,1,1,2,1 +4,4,4,0,1455,1,4,2,3,1,1,1,42,3,2,3,2,1,1,2,1 +4,7,4,3,730,5,5,4,3,1,2,2,46,3,1,2,2,2,2,2,1 +2,18,0,2,3244,1,3,1,2,1,4,3,33,1,2,2,3,2,2,2,1 +2,9,2,3,1670,1,2,4,2,1,2,3,22,3,2,1,3,2,2,2,0 +2,48,2,3,3979,5,4,4,3,1,1,3,41,3,2,2,3,1,2,2,1 +2,12,2,2,1922,1,3,4,3,1,2,2,37,3,2,1,2,2,1,2,0 +2,18,4,2,1295,1,2,4,2,1,1,2,27,3,2,2,3,2,1,2,1 
+4,4,4,3,1544,1,4,2,3,1,1,1,42,3,2,3,2,1,1,2,1 +2,8,2,9,907,1,2,3,4,1,2,1,26,3,2,1,3,2,2,2,1 +2,30,2,3,1715,5,3,4,2,1,1,3,26,3,2,1,3,2,1,2,1 +3,10,4,3,1347,5,4,4,3,1,2,2,27,3,2,2,3,2,2,2,1 +2,12,2,0,1007,4,3,4,4,1,1,1,22,3,2,1,3,2,1,2,1 +4,12,4,2,1402,3,4,3,2,1,4,3,37,3,1,1,3,2,2,2,1 +2,12,2,0,2002,1,4,3,3,1,4,2,30,3,1,1,3,1,2,2,1 +4,12,4,6,2096,1,4,2,3,1,3,1,49,3,2,1,2,1,1,2,1 +4,12,2,0,1101,1,3,3,4,1,2,1,27,3,2,2,3,2,2,2,1 +4,10,2,8,894,5,4,4,2,1,3,2,40,3,2,1,3,2,2,2,1 +2,11,2,2,1577,4,2,4,2,1,1,1,20,3,2,1,3,2,1,2,1 +4,33,3,9,2764,1,3,2,2,1,2,3,26,3,2,2,3,2,2,2,1 +2,48,0,0,8358,3,2,1,2,1,1,3,30,3,2,2,3,2,1,2,1 +3,12,2,2,1474,1,2,4,2,1,1,2,33,1,2,1,4,2,2,2,1 +4,24,2,1,5433,5,1,2,2,1,4,2,26,3,1,1,4,2,2,2,1 +2,14,2,9,1410,3,5,1,4,1,2,1,35,3,2,1,3,2,2,2,1 +4,20,4,0,3485,5,2,2,1,1,4,1,44,3,2,2,3,2,2,2,1 +4,18,4,1,3850,1,4,3,3,1,1,3,27,3,2,2,3,2,1,2,1 +2,60,2,0,7408,2,2,4,2,1,2,2,24,3,2,1,4,2,1,2,0 +3,24,2,3,1377,2,5,4,2,1,2,4,47,3,3,1,3,2,2,2,1 +4,30,3,9,4272,2,3,2,3,1,2,2,26,3,2,2,2,2,1,2,1 +2,24,3,3,1553,2,4,3,2,1,2,2,23,3,1,2,3,2,2,2,1 +2,36,3,9,9857,2,4,1,3,1,3,2,31,3,2,2,2,1,2,2,1 +4,6,4,0,362,2,3,4,2,1,4,3,52,3,2,2,2,2,1,2,1 +4,12,4,2,1935,1,5,4,3,1,4,1,43,3,2,3,3,2,2,2,1 +4,48,2,3,10222,5,4,4,3,1,3,3,37,2,2,1,3,2,2,2,1 +3,12,2,0,1330,1,2,4,3,1,1,1,26,3,2,1,3,2,1,2,1 +4,36,2,6,9055,5,3,2,3,1,4,4,35,3,3,1,2,1,2,2,1 +2,26,2,1,7966,1,2,2,3,1,3,3,30,3,2,2,3,2,1,2,1 +2,30,1,2,3496,4,3,4,3,1,2,3,34,2,2,1,3,1,2,2,1 +2,36,2,1,6948,1,3,2,3,1,2,3,35,3,1,1,4,2,2,2,1 +2,48,0,9,12204,5,3,2,3,1,2,3,48,1,2,1,4,2,2,2,1 +1,36,2,2,3446,1,5,4,3,1,2,3,42,3,2,1,3,1,1,2,0 +1,12,2,6,684,1,3,4,3,1,4,3,40,3,1,1,2,1,1,2,0 +1,33,4,2,4281,3,3,1,2,1,4,3,23,3,2,2,3,2,1,2,0 +1,42,2,3,7174,5,4,4,2,1,3,3,30,3,2,1,4,2,2,2,0 +1,24,1,3,1546,1,4,4,3,3,4,3,24,1,1,1,2,2,1,2,0 +1,24,2,2,2359,2,1,1,1,1,1,2,33,3,2,1,3,2,1,2,0 +4,24,2,3,3621,2,5,2,3,1,4,3,31,3,2,2,3,2,1,2,0 +1,12,2,4,741,2,1,4,2,1,3,2,22,3,2,1,3,2,1,2,0 +1,12,2,2,7865,1,5,4,3,1,4,4,53,3,3,1,4,2,2,2,0 
+1,24,2,1,2910,1,4,2,3,1,1,4,34,3,3,1,4,2,2,2,1 +1,18,4,0,5302,1,5,2,3,1,4,4,36,3,3,3,4,2,2,2,1 +1,36,2,2,3620,1,3,1,3,3,2,2,37,3,2,1,3,1,1,2,1 +1,18,2,3,3509,1,4,4,2,3,1,1,25,3,2,1,3,2,1,2,1 +2,12,2,2,3017,1,2,3,2,1,1,1,34,3,1,1,4,2,1,2,1 +1,12,2,2,1657,1,3,2,3,1,2,1,27,3,2,1,3,2,1,2,1 +1,8,4,10,1164,1,5,3,3,1,4,4,51,1,3,2,4,1,2,2,1 +1,36,4,2,6229,1,2,4,2,2,4,4,23,3,1,2,2,2,2,2,0 +1,24,1,0,1193,1,1,1,2,2,4,4,29,3,1,2,1,2,1,2,0 +1,30,0,2,4583,1,3,2,1,3,2,1,32,3,2,2,3,2,1,2,1 +1,36,4,2,5371,1,3,3,3,3,2,2,28,3,2,2,3,2,1,2,1 +1,12,2,2,708,1,3,2,3,3,3,2,38,3,2,1,2,1,1,2,1 +1,21,4,0,571,1,5,4,3,1,4,1,65,3,2,2,3,2,1,2,1 +1,30,2,3,2522,1,5,1,3,3,3,2,39,3,2,1,3,1,1,2,1 +1,36,2,2,5179,1,4,4,3,1,2,2,29,3,2,1,3,2,1,2,0 +1,36,2,1,8229,1,3,2,3,1,2,2,26,3,2,1,3,1,1,2,0 +4,24,4,2,2028,1,4,2,3,1,2,2,30,3,2,2,2,2,1,2,1 +1,6,2,0,1374,5,1,4,2,1,3,2,75,3,2,1,4,2,2,2,1 +1,12,2,2,1289,1,3,4,3,3,1,2,21,3,2,1,2,2,1,2,1 +1,36,2,2,2712,1,5,2,3,1,2,2,41,1,2,1,3,1,1,2,0 +1,15,4,2,975,1,3,2,1,1,3,2,25,3,2,2,3,2,1,2,1 +2,6,3,2,1050,1,1,4,3,1,1,2,35,2,2,2,4,2,2,1,1 +1,6,4,0,609,1,4,4,2,1,3,2,37,3,2,2,3,2,1,2,1 +1,48,2,1,4788,1,4,4,3,1,3,2,26,3,2,1,3,1,1,2,1 +2,24,2,2,3069,2,5,4,3,1,4,4,30,3,3,1,3,2,1,2,1 +2,12,2,0,836,2,2,4,2,1,2,2,23,1,2,1,2,2,1,2,0 +1,12,2,2,2577,1,3,2,1,1,1,3,42,3,2,1,3,2,1,2,1 +1,12,2,2,1620,1,3,2,2,2,3,2,30,3,2,1,3,2,1,2,1 +1,15,2,2,1845,1,2,4,2,3,1,2,46,3,1,1,3,2,1,2,1 +1,24,2,1,6579,1,1,4,3,1,2,4,29,3,3,1,4,2,2,2,1 +1,12,2,0,1893,1,3,4,2,3,4,2,29,3,2,1,3,2,2,2,1 +1,30,4,1,10623,1,5,3,3,1,4,4,38,3,3,3,4,1,2,2,1 +1,18,2,0,2249,2,4,4,3,1,3,3,30,3,2,1,4,1,2,2,1 +1,30,2,2,3108,1,2,2,1,1,4,2,31,3,2,1,2,2,1,2,0 +2,12,4,0,958,1,4,2,3,1,3,1,47,3,2,2,2,1,1,2,1 +4,24,2,1,9277,5,3,2,1,1,4,4,48,3,3,1,3,2,2,2,1 +4,24,4,10,6314,1,1,4,3,2,2,4,27,1,2,2,4,2,2,2,1 +1,12,4,1,1526,1,5,4,3,1,4,4,66,3,3,2,4,2,1,2,1 +1,12,2,2,3590,1,3,2,3,2,2,2,29,3,2,1,2,1,1,2,1 +1,24,4,1,6615,1,1,2,3,1,4,4,75,3,3,2,4,2,2,2,1 +1,6,4,2,1872,1,1,4,3,1,4,4,36,3,3,3,4,2,2,2,1 
+4,12,2,0,2859,5,1,4,3,1,4,4,38,3,2,1,4,2,2,2,1 +4,18,4,3,1582,4,5,4,3,1,4,3,46,3,2,2,3,2,1,2,1 +4,6,2,8,1238,5,1,4,3,1,4,2,36,3,2,1,4,1,2,2,1 +1,12,2,2,2578,1,1,3,2,1,4,4,55,3,3,1,4,2,1,2,1 +1,15,4,2,1433,1,3,4,2,1,3,2,25,3,1,2,3,2,1,2,1 +1,42,2,2,7882,1,4,2,3,3,4,2,45,3,3,1,3,1,1,2,1 +1,24,2,2,4169,1,3,4,3,1,4,2,28,3,2,1,3,2,1,2,1 +1,36,2,2,3959,1,1,4,3,1,3,2,30,3,2,1,4,2,2,2,1 +1,36,2,0,3249,1,4,2,3,1,4,4,39,1,3,1,4,1,2,2,1 +1,24,2,2,3149,1,2,4,3,1,1,4,22,1,3,1,3,2,1,2,1 +1,12,4,2,2246,1,5,3,3,1,3,2,60,3,2,2,3,2,1,2,0 +1,13,4,9,1797,1,2,3,3,1,1,2,28,1,2,2,2,2,1,2,1 +1,20,4,2,4272,1,5,1,2,1,4,2,24,3,2,2,3,2,1,2,1 +1,24,4,1,2957,1,5,4,3,1,4,2,63,3,2,2,3,2,2,2,1 +1,36,4,2,2348,1,3,3,4,1,2,2,46,3,2,2,3,2,2,2,1 +3,42,0,9,6289,1,2,2,1,1,1,2,33,3,2,2,3,2,1,2,1 +1,24,4,1,6419,1,5,2,2,1,4,4,44,3,3,2,4,1,2,2,1 +1,48,4,1,6143,1,5,4,2,1,4,4,58,2,3,2,2,2,1,2,0 +4,24,4,6,1597,1,5,4,3,1,4,4,54,3,3,2,3,1,1,2,1 +1,36,2,10,15857,1,1,2,1,2,3,3,43,3,2,1,4,2,1,2,1 +4,24,4,3,2223,2,5,4,3,1,4,2,52,1,2,2,3,2,1,2,1 +4,48,3,3,7238,5,5,3,3,1,3,3,32,1,2,2,3,1,1,2,1 +2,30,3,9,2503,2,5,4,3,1,2,2,41,2,2,2,3,2,1,2,1 +2,18,2,9,2622,2,3,4,3,1,4,3,34,3,2,1,3,2,1,2,1 +2,24,2,2,4351,5,3,1,2,1,4,2,48,3,2,1,2,2,2,2,1 +2,6,2,3,368,5,5,4,3,1,4,2,38,3,2,1,3,2,1,2,1 +2,12,2,8,754,5,5,4,3,1,4,2,38,3,2,2,3,2,1,2,1 +4,24,4,3,2424,5,5,4,3,1,4,2,53,3,2,2,3,2,1,2,1 +2,48,3,9,6681,5,3,4,3,1,4,4,38,3,3,1,3,1,2,2,1 +2,18,3,9,2427,5,5,4,3,1,2,2,42,3,2,2,3,2,1,2,1 +2,24,4,3,1216,2,2,4,3,1,4,4,38,1,2,2,3,1,1,2,0 +2,6,2,3,753,1,3,2,2,3,3,1,64,3,2,1,3,2,1,2,1 +2,7,2,3,2576,1,3,2,3,3,2,1,35,3,2,1,3,2,1,1,1 +2,6,2,3,590,1,2,3,4,1,3,1,26,3,2,1,2,2,1,1,1 +2,8,2,3,1414,1,3,4,3,3,2,1,33,3,2,1,3,2,1,1,1 +2,12,2,3,1103,1,4,4,3,3,3,1,29,3,2,2,3,2,1,2,1 +2,12,3,3,585,1,3,4,4,2,4,1,20,3,1,2,3,2,1,2,1 +2,6,2,3,1068,1,5,4,3,1,4,3,28,3,2,1,3,1,1,2,1 +1,8,4,0,713,1,5,4,3,1,4,1,47,3,2,2,2,2,1,2,1 +2,12,2,3,1092,1,3,4,2,3,4,1,49,3,2,2,3,2,2,2,1 +2,7,2,3,2329,1,2,1,2,3,1,1,45,3,2,1,3,2,1,2,1 
+2,13,4,3,882,1,2,4,3,3,4,1,23,3,2,2,3,2,1,2,1 +2,18,2,3,866,1,3,4,4,3,2,1,25,3,2,1,2,2,1,2,1 +2,7,2,3,2415,1,3,3,3,3,2,1,34,3,2,1,3,2,1,2,1 +2,13,2,3,2101,1,2,2,2,3,4,2,23,3,2,1,2,2,1,2,1 +2,18,2,3,1301,1,5,4,4,3,2,1,32,3,2,1,2,2,1,2,1 +2,18,2,3,1113,1,3,4,2,3,4,1,26,3,2,1,2,1,1,2,1 +2,8,2,3,760,1,4,4,2,3,2,1,44,3,2,1,2,2,1,2,1 +2,12,2,3,625,1,2,4,4,3,1,1,26,1,2,1,2,2,1,2,1 +3,6,4,0,1323,2,5,2,1,1,4,3,28,3,2,2,3,1,2,2,1 +1,9,4,3,1138,1,3,4,3,1,4,1,25,3,2,2,2,2,1,2,1 +2,18,4,3,1795,1,5,3,2,3,4,1,48,1,1,2,2,2,2,2,1 +2,15,4,3,2728,5,4,4,3,3,2,1,35,1,2,3,3,2,2,2,1 +2,6,2,3,484,1,4,3,4,3,3,1,28,1,2,1,2,2,1,2,1 +2,10,1,3,1048,1,3,4,3,1,4,1,23,2,2,1,2,2,1,2,1 +2,12,2,3,1155,1,5,3,4,3,3,1,40,1,2,2,2,2,1,2,1 +2,20,3,1,7057,5,4,3,3,1,4,2,36,1,1,2,4,1,2,2,1 +2,15,4,3,1537,5,5,4,3,3,4,1,50,3,2,2,3,2,2,2,1 +1,12,2,3,2214,1,3,4,3,1,3,2,24,3,2,1,2,2,1,2,1 +4,24,4,2,1585,1,4,4,3,1,3,2,40,3,2,2,3,2,1,2,1 +2,10,2,2,1521,1,3,4,1,1,2,3,31,3,2,1,2,2,1,2,1 +2,36,1,4,3990,5,2,3,2,1,2,4,29,1,2,1,1,2,1,2,1 +3,18,2,2,3049,1,2,1,2,1,1,2,45,2,2,1,2,2,1,2,1 +1,24,2,3,1282,2,3,4,2,1,2,3,32,3,2,1,2,2,1,2,0 +4,60,2,3,10144,2,4,2,2,1,4,1,21,3,2,1,3,2,2,2,1 +1,12,2,0,1168,1,3,4,4,1,3,1,27,3,2,1,2,2,1,2,1 +2,6,2,5,454,1,2,3,4,1,1,2,22,3,2,1,2,2,1,2,1 +4,15,3,1,3594,1,2,1,2,1,2,2,46,3,2,2,2,2,1,2,1 +4,12,2,2,1768,1,3,3,3,1,2,1,24,3,1,1,2,2,1,2,1 +4,60,3,3,15653,1,4,2,3,1,4,3,21,3,2,2,3,2,2,2,1 +3,12,3,0,2247,1,3,2,2,1,2,3,36,2,2,2,3,2,2,2,1 +4,24,2,3,1413,1,3,4,4,1,2,2,28,3,2,1,3,2,1,2,1 +2,45,4,1,4576,2,1,3,3,1,4,3,27,3,2,1,3,2,1,2,1 +1,24,4,3,1231,4,5,4,2,1,4,2,57,3,1,2,4,2,2,2,1 +1,36,2,1,8335,5,5,3,3,1,4,4,47,3,3,1,3,2,1,2,0 +2,36,4,1,5800,1,3,3,3,1,4,3,34,3,2,2,3,2,2,2,1 +1,18,3,6,8471,5,3,1,2,1,2,3,23,3,1,2,3,2,2,2,1 +1,30,2,2,3622,4,5,4,2,1,4,2,57,3,1,2,3,2,2,2,1 +1,6,4,3,1169,5,5,4,3,1,4,1,67,3,2,2,3,2,2,2,1 +4,15,2,4,1262,3,4,4,3,1,3,2,36,3,2,2,3,2,2,2,1 +3,24,4,2,3617,5,5,4,3,2,4,4,20,3,1,2,3,2,1,2,1 +2,30,4,0,2181,5,5,4,3,1,4,1,36,3,2,2,3,2,1,2,1 
+1,48,1,9,7685,1,4,2,2,3,4,3,37,3,1,1,3,2,1,2,0 +4,48,4,6,6110,1,3,1,3,1,3,4,31,1,3,1,3,2,2,2,1 +4,24,2,0,3757,1,5,4,2,2,4,4,62,3,3,1,3,2,2,2,1 +1,42,4,5,3394,1,1,4,3,2,4,3,65,3,2,2,1,2,1,2,1 +4,36,4,9,6304,5,5,4,3,1,4,1,36,3,2,2,3,2,1,2,1 +4,9,4,6,1244,5,5,4,2,1,4,2,41,3,1,2,2,2,1,2,1 +4,6,2,0,3518,1,3,2,3,3,3,2,26,3,1,1,3,2,1,2,1 +4,36,0,5,2613,1,3,4,3,1,2,3,27,3,2,2,3,2,1,2,1 +1,48,2,6,7476,1,4,4,3,1,1,4,50,3,3,1,4,2,2,2,1 +4,24,2,9,4591,4,3,2,3,1,3,2,54,3,2,3,4,2,2,2,0 +2,18,2,2,1924,5,2,4,2,1,3,1,27,3,1,1,3,2,1,2,0 +2,72,2,3,5595,2,3,2,4,1,2,3,24,3,2,1,3,2,1,2,0 +2,48,3,6,6224,1,5,4,3,1,4,4,50,3,3,1,3,2,1,2,0 +3,15,2,6,1905,1,5,4,3,1,4,3,40,3,1,1,4,2,2,2,1 +4,21,3,1,2993,1,3,3,3,1,2,1,28,2,2,2,2,2,1,2,1 +4,36,3,1,8947,5,4,3,3,1,2,3,31,2,2,1,4,1,2,2,1 +1,24,2,2,4020,1,3,2,3,1,2,3,27,2,2,1,3,2,1,2,1 +2,18,2,1,2779,1,3,1,4,1,3,3,21,3,1,1,3,2,2,2,1 +4,21,2,0,2782,3,4,1,2,1,2,3,31,1,2,1,4,2,1,2,1 +4,12,2,0,1884,1,5,4,3,1,4,3,39,3,2,1,4,2,2,2,1 +4,36,4,1,11054,5,3,4,3,1,2,3,30,3,2,1,4,2,2,2,1 +2,60,3,3,9157,5,3,2,3,1,2,4,27,3,3,1,4,2,1,2,1 +2,42,1,1,9283,1,1,1,3,1,2,4,55,1,3,1,4,2,2,2,1 +4,60,2,0,6527,5,3,4,3,1,4,4,34,3,3,1,3,1,2,2,1 +4,15,4,1,3368,4,5,3,3,1,4,4,23,3,1,2,3,2,2,2,1 +1,15,2,0,2511,1,1,1,2,1,4,3,23,3,1,1,3,2,1,2,1 +1,36,2,1,5493,1,5,2,3,1,4,4,42,3,3,1,3,1,1,2,1 +4,6,2,4,1338,3,3,1,1,1,4,1,62,3,2,1,3,2,1,2,1 +2,9,2,3,1082,1,5,4,3,1,4,3,37,3,2,2,2,2,1,2,1 +4,18,4,3,1149,4,3,4,3,1,3,1,46,3,2,2,3,2,1,2,1 +2,15,2,5,1308,1,5,4,3,1,4,3,38,3,2,2,2,2,1,2,1 +2,20,0,1,6148,2,5,3,4,1,4,3,31,1,2,2,3,2,2,2,1 +4,12,2,2,1736,1,4,3,2,1,4,1,31,3,2,1,2,2,1,2,1 +4,12,2,3,3059,4,4,2,1,1,4,1,61,3,2,1,2,2,1,2,1 +1,24,2,2,2996,5,3,2,4,1,4,3,20,3,2,1,3,2,1,2,0 +4,30,4,1,7596,5,5,1,3,1,4,3,63,3,2,2,3,2,1,2,1 +4,30,2,1,4811,5,4,2,2,1,4,2,24,2,1,1,2,2,1,2,1 +4,6,2,2,1766,1,3,1,4,1,2,2,21,3,1,1,3,2,1,2,1 +2,24,2,1,2760,5,5,4,3,1,4,4,36,1,3,1,3,2,2,2,1 +4,24,4,5,5507,1,5,3,3,1,4,4,44,3,3,2,3,2,1,2,1 +2,9,2,6,1199,1,4,4,2,1,4,2,67,3,2,2,4,2,2,2,1 
+3,24,2,2,2892,1,5,3,1,1,4,4,51,3,3,1,3,2,1,2,1 +2,36,3,0,2862,2,5,4,3,1,3,4,30,3,3,1,3,2,1,2,1 +1,9,2,0,654,1,3,4,3,1,3,3,28,3,2,1,2,2,1,2,0 +2,9,4,6,1136,4,5,4,3,1,3,4,32,3,3,2,3,1,1,2,0 +2,24,2,1,4113,3,2,3,2,1,4,3,28,3,1,1,3,2,1,2,0 +2,6,2,0,14555,5,1,1,3,1,2,2,23,3,2,1,1,2,2,2,0 +1,15,0,0,950,1,5,4,3,1,3,3,33,3,1,2,3,1,1,2,0 +1,24,4,0,1199,1,5,4,3,1,4,3,60,3,2,2,2,2,1,2,0 +1,12,0,0,1082,1,3,4,3,1,4,3,48,1,2,2,3,2,1,2,0 +2,30,2,0,2150,1,3,4,2,3,2,4,24,1,2,1,3,2,1,2,0 +2,36,4,0,2820,1,2,4,1,1,4,3,27,3,2,2,3,2,1,2,0 +2,48,2,3,3060,1,4,4,3,1,4,1,28,3,2,2,3,2,1,2,0 +1,18,2,3,2600,1,3,4,3,1,4,4,65,3,3,2,3,2,1,2,0 +4,21,0,0,5003,5,3,1,2,1,4,2,29,1,2,2,3,2,2,2,0 +2,60,2,6,6288,1,3,4,3,1,4,4,42,3,3,1,3,2,1,2,0 +4,24,3,0,2538,1,5,4,3,1,4,3,47,3,2,2,2,1,1,2,0 +4,9,2,3,1478,1,4,4,3,1,2,3,22,3,2,1,3,2,1,2,0 +2,39,4,3,4933,1,4,2,3,3,2,1,25,3,2,2,3,2,1,2,0 +4,18,4,0,1530,1,3,3,3,1,2,2,32,1,2,2,3,2,1,2,0 +2,9,1,0,1437,2,4,2,3,1,3,4,29,3,2,1,3,2,1,2,0 +1,15,2,4,1275,5,3,4,2,1,2,3,24,3,1,1,3,2,1,2,0 +1,24,2,3,1823,1,1,4,3,1,2,3,30,2,2,1,4,1,1,2,0 +1,9,2,0,1422,1,2,3,3,1,2,4,27,3,3,1,4,2,2,2,0 +1,18,2,4,1217,1,3,4,4,1,3,1,47,3,2,1,2,2,2,2,0 +1,36,2,0,9271,1,4,2,3,1,1,3,24,3,2,1,3,2,2,2,0 +1,36,3,9,2145,1,4,2,3,1,1,3,24,3,2,2,3,2,2,2,0 +1,36,2,0,1842,1,2,4,2,1,4,3,34,3,2,1,3,2,2,2,0 +2,18,3,2,4297,1,5,4,1,1,3,4,40,3,2,1,4,2,2,2,0 +1,6,4,2,3384,1,3,1,1,1,4,1,44,3,1,1,4,2,2,2,0 +2,18,4,3,1245,1,3,4,4,1,2,3,33,3,2,1,3,2,1,2,0 +4,15,2,6,4623,2,3,3,3,1,2,2,40,3,2,1,4,2,2,2,0 +2,30,4,2,8386,1,4,2,3,1,2,2,49,3,2,1,3,2,1,2,0 +1,24,3,3,1024,1,2,4,4,1,4,1,48,2,2,1,3,2,1,2,0 +2,36,2,0,14318,1,5,4,3,1,2,4,57,3,3,1,4,2,2,2,0 +2,6,1,6,433,4,2,4,2,1,2,2,24,1,1,1,3,1,1,2,0 +1,12,1,3,2149,1,3,4,1,1,1,4,29,3,3,1,3,2,1,2,0 +4,24,2,3,2397,3,5,3,3,1,2,3,35,1,2,2,3,2,2,2,0 +2,6,1,0,931,2,2,1,2,1,1,2,32,2,2,1,2,2,1,2,0 +2,15,3,5,1512,4,3,3,4,1,3,2,61,2,2,2,3,2,1,2,0 +2,24,0,9,4241,1,3,1,3,1,4,1,36,3,2,3,2,2,2,2,0 +2,24,4,2,4736,1,2,2,2,1,4,3,25,1,2,1,2,2,1,2,0 
+2,15,0,0,1778,1,2,2,2,1,1,1,26,3,1,2,1,2,1,2,0 +3,15,2,3,2327,1,2,2,2,1,3,1,25,3,2,1,2,2,1,2,0 +1,24,1,2,6872,1,2,2,1,1,1,2,55,1,2,1,3,2,2,2,0 +1,12,2,6,795,1,2,4,2,1,4,2,53,3,2,1,3,2,1,2,0 +3,30,3,9,1908,1,5,4,3,1,4,1,66,3,2,1,4,2,2,2,0 +2,36,0,9,1953,1,5,4,3,1,4,4,61,3,3,1,4,2,2,2,0 +3,18,2,2,2864,1,3,2,3,1,1,1,34,3,2,1,2,1,1,2,0 +3,21,4,6,2319,1,2,2,1,1,1,3,33,3,1,1,3,2,1,2,0 +1,24,2,0,915,5,5,4,2,1,2,3,29,1,2,1,3,2,1,2,0 +3,24,2,0,947,1,4,4,3,1,3,4,38,1,3,1,3,1,1,2,0 +1,24,2,0,1381,5,3,4,2,1,2,2,35,3,2,1,3,2,1,2,0 +1,24,2,0,1285,5,4,4,2,1,4,4,32,3,1,1,3,2,1,2,0 +1,24,2,0,1371,5,3,4,2,1,4,1,25,3,1,1,3,2,1,2,0 +2,18,2,0,1042,5,3,4,2,1,2,2,33,3,2,1,3,2,1,2,0 +1,12,2,0,900,5,3,4,4,1,2,3,23,3,2,1,3,2,1,2,0 +1,24,2,0,1207,1,2,4,2,1,4,2,24,3,1,1,3,2,1,2,0 +2,18,0,0,2278,2,2,3,2,1,3,3,28,3,2,2,3,2,1,2,0 +1,60,3,9,6836,1,5,3,3,1,4,4,63,3,2,2,3,2,2,2,0 +1,24,2,2,3345,1,5,4,3,1,2,2,39,3,1,1,4,2,2,2,0 +1,6,1,6,1198,1,5,4,2,1,4,4,35,3,3,1,3,2,1,2,0 +2,48,2,9,15672,1,3,2,3,1,2,3,23,3,2,1,3,2,2,2,0 +1,60,2,9,7297,1,5,4,3,2,4,4,36,3,1,1,3,2,1,2,0 +4,18,2,5,1943,1,2,4,2,1,4,1,23,3,2,1,3,2,1,2,0 +1,18,2,3,3190,1,3,2,2,1,2,1,24,3,2,1,3,2,1,2,0 +2,9,1,1,5129,1,5,2,2,1,4,4,74,1,3,1,4,1,2,2,0 +4,18,3,2,1808,1,4,4,2,1,1,1,22,3,2,1,3,2,1,2,0 +3,10,2,0,1240,2,5,1,2,1,4,4,48,3,3,1,2,1,1,2,0 +1,12,2,0,759,1,4,4,3,1,2,1,26,3,2,1,3,2,1,2,0 +2,8,2,2,1237,1,3,3,2,1,4,1,24,3,2,1,3,2,1,2,0 +4,9,2,2,1980,1,2,2,2,2,2,3,19,3,1,2,3,2,1,2,0 +2,48,2,3,10961,4,4,1,3,2,2,4,27,1,2,2,3,2,2,2,0 +1,36,3,6,6887,1,3,4,3,1,3,2,29,2,2,1,3,2,2,2,0 +1,24,2,3,1938,1,2,4,1,1,3,2,32,3,2,1,3,2,1,2,0 +1,21,2,3,1835,1,3,3,2,1,2,1,25,3,2,2,3,2,2,2,0 +1,24,3,3,1659,1,2,4,2,1,2,3,29,3,1,1,2,2,2,2,0 +2,6,3,0,1209,1,1,4,3,1,4,2,47,3,2,1,4,2,2,2,0 +2,48,0,9,3844,2,4,4,3,1,4,4,34,3,3,1,2,1,1,2,0 +1,12,4,0,4843,1,5,3,3,2,4,2,43,3,1,2,3,2,2,2,0 +2,12,2,5,639,1,3,4,3,1,2,3,30,3,2,1,3,2,1,2,0 +2,48,2,3,5951,1,3,2,2,1,2,1,22,3,2,1,3,2,1,2,0 +2,36,0,3,3804,1,3,4,2,1,1,3,42,3,2,1,3,2,2,2,0 
+4,36,3,3,4463,1,3,4,3,1,2,3,26,3,2,2,4,2,2,2,0 +4,36,3,9,7980,5,2,4,3,1,4,3,27,3,1,2,3,2,2,2,0 +3,36,2,3,4210,1,3,4,3,1,2,3,26,3,2,1,3,2,1,2,0 +4,6,2,2,4611,1,2,1,2,1,4,2,32,3,2,1,3,2,1,2,0 +2,24,2,1,11560,1,3,1,2,1,4,3,23,3,1,2,4,2,1,2,0 +4,18,0,9,4165,1,3,2,3,1,2,3,36,2,2,2,3,1,1,2,0 +2,24,2,2,4057,1,4,3,1,1,3,3,43,3,2,1,3,2,2,2,0 +4,18,1,0,6458,1,5,2,3,1,4,4,39,1,2,2,4,1,2,2,0 +4,12,2,0,1386,3,3,2,2,1,2,2,26,3,2,1,3,2,1,2,0 +1,36,2,6,1977,5,5,4,3,1,4,4,40,3,2,1,4,2,2,2,0 +2,18,4,2,1928,1,2,2,3,1,2,1,31,3,2,2,2,2,1,2,0 +4,12,2,2,1123,3,3,4,2,1,4,3,29,3,1,1,2,2,1,2,0 +2,24,2,10,11328,1,3,2,3,2,3,3,29,1,2,2,4,2,2,2,0 +2,24,4,10,11938,1,3,2,3,2,3,3,39,3,2,2,4,1,2,2,0 +2,27,4,3,2520,3,3,4,3,1,2,2,23,3,2,2,2,2,1,2,0 +2,30,3,3,1919,2,2,4,3,1,3,4,30,2,2,2,4,2,1,2,0 +2,60,1,10,14782,2,5,3,2,1,4,4,60,1,3,2,4,2,2,2,0 +2,36,2,3,2671,2,3,4,2,2,4,4,50,3,3,1,3,2,1,2,0 +2,36,2,6,12612,2,3,1,3,1,4,4,47,3,3,1,3,1,2,2,0 +2,45,2,3,3031,2,3,4,3,3,4,2,21,3,1,1,3,2,1,2,0 +1,12,1,3,626,1,3,4,2,1,4,1,24,1,2,1,2,2,1,2,0 +2,24,4,9,1935,1,5,4,1,1,4,1,31,3,2,2,3,2,2,2,0 +3,24,4,0,1344,5,4,4,3,1,2,1,37,1,2,2,2,1,1,2,0 +4,18,2,2,1533,1,2,4,4,2,1,2,43,3,2,1,2,1,1,2,0 +1,48,2,0,3931,1,4,4,3,1,4,4,46,3,3,1,3,1,1,2,0 +1,24,1,2,3349,3,2,4,3,1,4,4,30,3,3,1,3,1,2,2,0 +1,36,2,3,2302,1,3,4,1,1,4,3,31,3,1,1,3,2,1,2,0 +1,42,2,3,3965,1,2,4,3,1,3,3,34,3,2,1,3,2,1,2,0 +1,12,2,3,727,2,2,4,4,1,3,4,33,3,2,1,2,2,2,2,0 +4,48,2,9,3914,5,3,4,1,1,2,1,38,1,2,1,3,2,1,2,0 +1,48,2,9,4308,1,2,3,2,1,4,2,24,3,1,1,3,2,1,2,0 +2,12,2,3,1534,1,2,1,4,1,1,1,23,3,1,1,3,2,1,2,0 +4,18,4,0,2775,1,4,2,3,1,2,2,31,1,2,2,3,2,1,2,0 +1,40,4,6,5998,1,3,4,3,1,3,4,27,1,2,1,3,2,2,2,0 +3,15,4,3,1271,5,3,3,3,1,4,4,39,3,3,2,3,2,2,2,0 +2,12,2,0,1295,1,2,3,2,1,1,3,25,3,1,1,3,2,1,2,0 +2,36,2,1,9398,1,2,1,4,1,4,3,28,3,1,1,4,2,2,2,0 +2,12,2,2,951,2,2,4,2,1,4,3,27,1,1,4,3,2,1,2,0 +2,24,2,0,1355,1,2,3,2,1,4,3,25,3,2,1,2,2,2,2,0 +1,48,2,4,3051,1,3,3,3,1,4,3,54,3,2,1,3,2,1,2,0 +4,36,4,0,7855,1,3,4,2,1,2,1,25,2,2,2,3,2,2,2,0 
+4,18,2,3,433,1,1,3,2,2,4,1,22,3,1,1,3,2,1,2,0 +4,36,3,9,9572,1,2,1,1,1,1,3,28,3,2,2,3,2,1,2,0 +2,24,1,6,1837,1,4,4,2,1,4,4,34,1,3,1,2,2,1,2,0 +2,30,4,0,4249,1,1,4,4,1,2,3,28,3,2,2,4,2,1,2,0 +2,30,4,0,5234,1,1,4,4,1,2,3,28,3,2,2,4,2,1,2,0 +1,48,2,3,6758,1,3,3,2,1,2,3,31,3,2,1,3,2,2,2,0 +1,9,2,3,1366,1,2,3,2,1,4,2,22,3,1,1,3,2,1,2,0 +1,24,1,10,1358,5,5,4,3,1,3,3,40,2,2,1,4,2,2,2,0 +1,18,2,2,2473,1,1,4,3,1,1,3,25,3,2,1,1,2,1,2,0 +3,9,0,3,1337,1,2,4,3,1,2,3,34,3,2,2,4,2,2,2,0 +1,48,2,0,7763,1,5,4,3,1,4,4,42,1,3,1,4,2,1,2,0 +2,15,1,0,1264,2,3,2,4,1,2,2,25,3,1,1,3,2,1,2,0 +2,15,2,0,2631,2,3,2,2,1,4,3,28,3,1,2,3,2,2,2,0 +2,48,2,0,6560,2,4,3,3,1,2,2,24,3,2,1,3,2,1,2,0 +1,24,2,0,3123,1,2,4,2,1,1,2,27,3,2,1,3,2,1,2,0 +1,36,4,6,8065,1,3,3,2,1,2,4,25,3,2,2,4,2,2,2,0 +1,24,2,3,2439,1,2,4,2,1,4,1,35,3,2,1,3,2,2,2,0 +2,36,2,2,9034,2,2,4,3,2,1,4,29,3,1,1,4,2,2,2,0 +2,60,2,0,14027,1,4,4,3,1,2,4,27,3,2,1,4,2,2,2,0 +1,36,4,1,9629,1,4,4,3,1,4,3,24,3,2,2,3,2,2,2,0 +2,12,2,3,1484,5,3,2,4,1,1,1,25,3,2,1,3,2,2,2,0 +1,18,2,2,1131,1,1,4,2,1,2,3,33,3,2,1,3,2,1,2,0 +2,24,3,2,2064,1,1,3,2,1,2,2,34,3,2,1,4,2,2,2,0 +2,18,2,1,12976,1,1,3,2,1,4,4,38,3,3,1,4,2,2,2,0 +4,21,3,9,2580,3,2,4,3,1,2,1,41,1,2,1,2,1,1,2,0 +4,27,2,0,2570,1,3,3,2,1,3,1,21,3,1,1,3,2,1,2,0 +2,27,2,9,3915,1,3,4,3,1,2,3,36,3,2,1,3,1,2,2,0 +4,10,2,0,1309,5,3,4,3,3,4,2,27,3,2,1,2,2,1,2,0 +1,24,2,0,4817,1,4,2,3,2,3,2,31,3,2,1,3,2,2,2,0 +1,12,2,0,2579,1,2,4,3,1,1,1,33,3,2,1,2,1,1,2,0 +2,36,3,0,2225,1,5,4,3,1,4,4,57,1,3,2,3,2,2,2,0 +1,18,2,2,4153,1,3,2,3,2,3,3,42,3,2,1,3,2,1,2,0 +1,18,0,2,3114,1,2,1,2,1,4,2,26,3,1,1,3,2,1,2,0 +1,18,4,2,2124,1,3,4,2,1,4,1,24,3,1,2,3,2,1,2,0 +1,18,1,2,1553,1,3,4,3,1,3,3,44,1,2,1,3,2,1,2,0 +1,30,2,2,2406,1,4,4,2,1,4,1,23,3,1,1,3,2,1,2,0 +1,24,3,0,1333,1,1,4,3,1,2,1,43,3,3,2,3,1,1,2,0 +1,48,0,2,7119,1,3,3,3,1,4,4,53,3,3,2,3,1,1,2,0 +1,24,3,0,4870,1,3,3,3,1,4,4,53,3,3,2,3,1,1,2,0 +1,12,4,0,691,1,5,4,3,1,3,2,35,3,2,2,3,2,1,2,0 +1,42,3,3,4370,1,4,3,3,1,2,2,26,1,2,2,3,1,2,2,0 
+1,36,1,2,2746,1,5,4,3,1,4,3,31,1,2,1,3,2,1,2,0 +1,24,0,2,4110,1,5,3,3,1,4,4,23,1,1,2,3,1,1,2,0 +1,18,2,2,2462,1,3,2,3,1,2,3,22,3,2,1,3,2,1,2,0 +1,12,2,2,1282,1,3,2,2,1,4,3,20,3,1,1,3,2,1,2,0 +2,12,0,2,2969,1,2,4,2,1,3,2,25,3,1,2,3,2,1,2,0 +1,48,0,1,4605,1,5,3,3,1,4,4,24,3,3,2,3,1,1,2,0 +1,48,4,1,6331,1,5,4,3,1,4,4,46,3,3,2,3,2,2,2,0 +1,24,1,2,3552,1,4,3,3,1,4,3,27,1,2,1,3,2,1,2,0 +1,12,1,0,697,1,2,4,3,1,2,3,46,1,2,2,3,2,2,2,0 +1,24,2,0,1442,1,4,4,2,1,4,3,23,3,1,2,3,2,1,2,0 +1,27,0,9,5293,1,1,2,3,1,4,2,50,3,2,2,3,2,2,2,0 +1,21,3,6,3414,1,2,2,3,1,1,2,26,2,2,2,3,2,1,2,0 +1,18,2,2,2039,1,3,1,2,1,4,1,20,3,1,1,3,2,1,2,0 +1,24,1,9,3161,1,3,4,3,1,2,2,31,1,1,1,3,2,2,2,0 +1,12,2,8,902,1,4,4,4,1,4,2,21,3,1,1,3,2,1,2,0 +1,48,2,1,10297,1,4,4,3,1,4,4,39,3,3,3,3,1,2,2,0 +2,48,0,9,14421,1,3,2,3,1,2,3,25,2,2,1,3,2,2,2,0 +2,18,4,0,1056,1,5,3,3,3,3,1,30,3,2,2,3,2,1,2,0 +1,12,2,0,1274,1,2,3,2,1,1,1,37,1,2,1,2,2,1,2,0 +2,12,2,0,1223,1,5,1,1,1,1,1,46,3,1,2,3,2,1,2,0 +1,12,2,0,1372,1,4,2,1,1,3,3,36,3,2,1,3,2,1,2,0 +1,16,4,0,2625,1,5,2,3,3,4,2,43,3,1,1,3,2,2,1,0 +1,20,4,0,2235,1,3,4,4,3,2,2,33,1,1,2,3,2,1,1,0 +2,9,2,2,959,1,3,1,2,1,2,3,29,1,2,1,3,2,1,2,0 +2,18,4,0,884,1,5,4,3,1,4,3,36,3,2,1,3,1,2,2,0 +2,24,2,0,1246,1,2,4,3,1,2,1,23,1,2,1,2,2,1,2,0 +2,36,3,0,8086,2,5,2,3,1,4,3,42,2,2,4,4,2,2,2,0 +4,48,4,0,10127,3,3,2,3,1,2,4,44,3,3,1,3,2,1,2,0 +2,12,2,0,888,1,5,4,3,1,4,3,41,1,2,1,2,1,1,2,0 +4,12,2,6,719,1,5,4,3,1,4,3,41,1,2,1,2,1,1,2,0 +2,36,2,0,12389,5,3,1,3,1,4,4,37,1,3,1,3,2,2,2,0 +1,12,2,3,709,1,5,4,3,1,4,1,57,3,2,1,2,2,1,2,0 +2,15,1,0,6850,2,1,1,3,1,2,2,34,2,2,1,4,1,2,2,0 +4,10,2,2,2210,1,3,2,3,1,2,1,25,3,1,1,2,2,1,2,0 +4,30,1,1,7485,5,1,4,2,1,1,1,53,1,2,1,4,2,2,2,0 +1,18,1,0,1442,1,4,4,3,1,4,4,32,1,3,2,2,1,1,2,0 +4,12,4,3,797,5,5,4,2,1,3,2,33,3,2,1,2,1,1,2,0 +2,45,4,3,4746,1,2,4,3,1,2,2,24,1,2,2,2,2,1,2,0 +3,12,4,0,939,3,4,4,4,1,2,1,28,3,2,3,3,2,2,2,0 +2,21,2,9,1188,1,5,2,2,1,4,2,39,3,2,1,3,1,1,2,0 +4,48,4,1,11590,2,3,2,2,1,4,3,24,3,1,2,2,2,1,2,0 
+3,12,1,9,609,1,2,4,2,1,1,1,26,1,2,1,1,2,1,2,0 +1,18,4,5,1190,1,1,2,2,1,4,4,55,3,3,3,1,1,1,2,0 +2,21,2,9,2767,2,5,4,1,1,2,3,61,3,1,2,2,2,1,2,0 +2,30,2,2,3441,2,3,2,2,2,4,3,21,1,1,1,3,2,1,2,0 +2,30,0,9,4280,2,3,4,2,1,4,3,26,3,1,2,2,2,1,2,0 +2,24,2,3,3092,2,2,3,4,1,2,3,22,3,1,1,3,2,2,2,0 +4,18,2,0,6761,5,3,2,3,1,4,3,68,2,1,2,3,2,1,2,0 +2,12,2,3,1331,1,2,2,3,1,1,3,22,3,2,1,3,2,1,2,0 +2,54,0,9,15945,1,2,3,3,1,4,4,58,3,1,1,3,2,2,2,0 +1,24,2,2,3234,1,2,4,2,1,4,1,23,3,1,1,2,2,2,2,0 +2,15,2,3,802,1,5,4,3,1,3,3,37,3,2,1,3,1,1,2,0 +2,48,2,2,9960,1,2,1,2,1,2,3,26,1,2,1,3,2,2,2,0 +4,24,3,9,8648,1,2,2,3,1,2,3,27,1,2,2,3,2,2,2,0 +1,18,2,3,1345,1,3,4,4,1,3,1,26,3,2,1,3,2,1,2,0 +1,45,2,3,1845,1,3,4,3,1,4,4,23,3,3,1,3,2,2,2,0 +1,21,1,0,1647,5,3,4,3,1,2,2,40,1,2,2,2,1,1,2,0 +4,48,2,9,4844,1,1,3,3,1,2,3,33,3,1,1,4,2,2,2,0 +2,27,0,9,8318,1,5,2,2,1,4,4,42,2,3,2,4,2,2,2,0 +3,18,2,3,2100,1,3,4,3,2,2,1,37,3,2,1,3,2,1,2,0 +1,45,0,9,11816,1,5,2,3,1,4,3,29,3,1,2,3,2,1,2,0 +1,6,2,6,448,1,2,4,2,1,4,2,23,3,2,1,3,2,1,2,0 +1,30,2,5,11998,1,2,1,1,1,1,4,34,1,2,1,2,2,2,2,0 +2,48,0,10,18424,1,3,1,2,1,2,2,32,1,2,1,4,2,2,1,0 +1,6,2,0,14896,1,5,1,3,1,4,4,68,1,2,1,4,2,2,2,0 +2,12,2,2,2762,5,5,1,2,1,2,2,25,3,2,1,3,2,2,2,0 +1,12,2,1,3386,1,5,3,3,1,4,4,35,3,3,1,3,2,2,2,0 +2,24,2,3,2039,1,2,1,4,1,1,2,22,3,2,1,3,2,2,2,0 +4,18,3,9,2169,1,3,4,4,1,2,3,28,3,2,1,3,2,2,2,0 +2,48,4,2,5096,1,3,2,2,1,3,3,30,3,2,1,4,2,2,2,0 +1,18,2,3,1882,1,3,4,2,1,4,3,25,1,1,2,3,2,1,2,0 +1,48,2,3,6999,1,4,1,4,3,1,1,34,3,2,2,3,2,2,2,0 +4,12,4,9,2292,1,1,4,3,1,2,3,42,2,2,2,4,2,2,2,0 +1,14,2,0,8978,1,5,1,1,1,4,2,45,3,2,1,4,2,2,1,0 +1,12,2,3,674,2,4,4,4,1,1,2,20,3,2,1,3,2,1,2,0 +1,18,2,0,976,1,2,1,2,1,2,3,23,3,2,1,2,2,1,2,0 +2,24,2,0,2718,1,3,3,2,1,4,2,20,3,1,1,2,2,2,2,0 +1,18,2,6,750,1,1,4,2,1,1,1,27,3,2,1,1,2,1,2,0 +2,24,2,1,12579,1,5,4,2,1,2,4,44,3,3,1,4,2,2,2,0 +1,18,2,1,7511,5,5,1,3,1,4,2,51,3,3,1,3,1,2,2,0 +1,18,4,0,3966,1,5,1,2,1,4,1,33,1,1,3,3,2,2,2,0 +1,12,0,3,6199,1,3,4,3,1,2,2,28,3,1,2,3,2,2,2,0 
+1,24,2,3,1987,1,3,2,3,1,4,1,21,3,1,1,2,1,1,2,0 +1,24,2,0,2303,1,5,4,3,2,1,1,45,3,2,1,3,2,1,2,0 +4,21,4,0,12680,5,5,4,3,1,4,4,30,3,3,1,4,2,2,2,0 +2,12,2,3,6468,5,1,2,3,1,1,4,52,3,2,1,4,2,2,2,0 +1,30,2,2,6350,5,5,4,3,1,4,2,31,3,2,1,3,2,1,2,0 diff --git a/methods/catalog/rbr/library/reproduce/utils_general.py b/methods/catalog/rbr/library/reproduce/utils_general.py new file mode 100644 index 0000000..e8188c4 --- /dev/null +++ b/methods/catalog/rbr/library/reproduce/utils_general.py @@ -0,0 +1,82 @@ +import pandas as pd +from sklearn.compose import ColumnTransformer +from sklearn.preprocessing import OneHotEncoder, StandardScaler + + +class Transformer: + def __init__(self, data, numerical, scale=True): + self.data = data + self.empty_df = pd.DataFrame(columns=data.columns) + + self.num_name = numerical + self.cat_name = list(data.columns.difference(numerical)) + + numeric_transformer = StandardScaler(with_std=scale, with_mean=scale) + + categorical_transformer = OneHotEncoder(handle_unknown="ignore") + + self.encs = [] + if len(self.num_name) > 0: + self.encs.append(("num", numeric_transformer, self.num_name)) + if len(self.cat_name) > 0: + self.encs.append(("cat", categorical_transformer, self.cat_name)) + + self.transformer = ColumnTransformer(transformers=self.encs) + + self.transformer.fit(data) + + self.cat_indices = [] + self.n_num_features_out = len(self.num_name) + + if len(self.num_name) > 0: + self.enc_num = self.transformer.named_transformers_["num"] + if len(self.cat_name) > 0: + self.enc_cat = self.transformer.named_transformers_["cat"] + + self.n_cat_features_out = len(self.enc_cat.get_feature_names()) + self.cat_indices = list( + range( + self.n_num_features_out, + self.n_num_features_out + self.n_cat_features_out, + ) + ) + else: + self.n_cat_features_out = 0 + + def transform(self, X): + return self.transformer.transform(X) + + def inverse_transform(self, X): + df = self.empty_df.copy() + n_num = len(self.num_name) + + if n_num > 0: + X_num = X[..., 
:n_num] + inv_num = self.enc_num.inverse_transform(X_num) + df[self.num_name] = inv_num + + if len(self.cat_name) > 0: + X_cat = X[..., n_num:] + inv_cat = self.enc_cat.inverse_transform(X_cat) + df[self.cat_name] = inv_cat + + return df + + +def get_transformer(dataset_name, dataset): + # dataset_name = dataset_name.replace("_modified", "") + if "german" in dataset_name: + numerical = [ + "age", + "amount", + "duration", + ] + else: + raise ValueError(f"Dataset {dataset_name} not recognized for transformer.") + # dataset = pd.read_csv(f"methods/catalog/rbr/library/{dataset_name}.csv") # keep it in the same location for simplicity + + dataset = dataset.drop("y", axis=1) + + transformer = Transformer(dataset, numerical, ("synthesis" not in dataset_name)) + + return transformer diff --git a/methods/catalog/rbr/library/reproduce/utils_reproduce.py b/methods/catalog/rbr/library/reproduce/utils_reproduce.py new file mode 100644 index 0000000..7f4345d --- /dev/null +++ b/methods/catalog/rbr/library/reproduce/utils_reproduce.py @@ -0,0 +1,600 @@ +# util functions from the original RBR implementation +# meant mostly for the reproduce.py script + +# Following instructions by professor Karimi, +# this file will contain all the preprocesing/postprocessing functions +# and model creation, that exists in this repo, but modified to function +# with the implementation of RBR in methods/catalog/rbr to get the results +# as close as possible to the original paper. 
+from typing import Any, List, Union + +import numpy as np +import pandas as pd +import tensorflow as tf +import torch +import torch.nn as nn +import torch.nn.functional as F +from sklearn.model_selection import KFold, train_test_split + +from data.api.data import Data +from methods.catalog.rbr.library.reproduce.utils_general import Transformer +from models.api.mlmodel import MLModel + + +# define the model used in the paper +# Custom Pytorch Module for Neural Networks +class PyTorchNeuralNetworkWrapper(torch.nn.Module): + """ + Initializes a PyTorch neural network model with specified number of inputs, outputs, and neurons. + + Parameters + ---------- + n_inputs (int): Number of input features. + + Returns + ------- + PyTorchNeuralNetwork. + + Raises + ------- + None. + """ + + # Constructor + def __init__(self, n_inputs): + super(PyTorchNeuralNetworkWrapper, self).__init__() + self.fc1 = torch.nn.Linear(n_inputs, 20) + self.fc2 = torch.nn.Linear(20, 50) + self.fc3 = torch.nn.Linear(50, 20) + self.out = torch.nn.Linear(20, 1) + + for p in self.parameters(): + if len(p.shape) > 1: + nn.init.xavier_uniform_(p) + + # Predictions + def forward(self, x): + """ + Performs the forward pass of the neural network. + + Parameters + ------- + x (torch.Tensor): Input tensor to the neural network. + + Returns + ------- + torch.Tensor: Predicted output tensor. + + Raises + ------- + None. + """ + x = F.relu(self.fc1(x)) + x = F.relu(self.fc2(x)) + x = F.relu(self.fc3(x)) + x = torch.sigmoid(self.out(x)) + return x + + def fit(self, x_train, y_train): + """ + Fits the neural network to the training data. + + Parameters + ---------- + x_train (array-like): Input training data. + y_train (array-like): Target training data. + + Returns + ------- + PyTorchNeuralNetwork: Trained neural network instance. + + Raises + ------ + None. 
+ """ + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + # device = next(self.parameters()).device + x_train_tensor = torch.from_numpy(np.array(x_train, dtype=np.float32)) + y_train_tensor = torch.from_numpy(np.array(y_train, dtype=np.float32)) + # print(f"x_train_tensor shape: {x_train_tensor[:5]}") + # print(f"y_train_tensor shape: {y_train_tensor.view(-1,1)[:5]}") + + # train_dataset = torch.utils.data.TensorDataset(x_train_tensor, y_train_tensor) + # train_loader = torch.utils.data.DataLoader( + # dataset=train_dataset, batch_size=1000, shuffle=True + # ) + self.train() + # defining the optimizer + optimizer = torch.optim.Adam(self.parameters(), lr=0.001, weight_decay=0.1) + # defining Cross-Entropy loss + criterion = torch.nn.BCELoss(reduction="sum") + + loss_diff = 1.0 + prev_loss = 0.0 + num_stable_iter = 0 + max_stable_iter = 3 + + epochs = 1000 # TODO increase epochs because paper base is 1000 + for i in range(epochs): + # for i, (data, target) in enumerate(train_loader): + optimizer.zero_grad() + output = self(x_train_tensor.to(device)) + loss = criterion(output, y_train_tensor.view(-1, 1).to(device)) + + loss.backward() + optimizer.step() + + # print("Iter %d: loss: %f" % (i, loss.data.item())) + + loss_diff = prev_loss - loss.data.item() + + if loss_diff <= 1e-7: + num_stable_iter += 1 + if num_stable_iter >= max_stable_iter: + break + else: + num_stable_iter = 0 + + prev_loss = loss.data.item() + + self.eval() + + return self + + def predict(self, test): + """ + Predicts using the trained neural network. + + Parameters + ------- + test (torch.Tensor): Input tensor for prediction. + + Returns + ------- + torch.Tensor: Predicted output tensor. + + Raises + ------- + None. 
+ """ + device = next(self.parameters()).device + self.eval() + y_train_pred = [] + with torch.no_grad(): + output = self(test.to(device)) + # print(f"output shape in predict: {output[:5]}") + y_train_pred.extend(output) + + y_train_pred = torch.stack(y_train_pred) + # print(f"y_train_pred shape: {y_train_pred[:5]}") + return y_train_pred + + +# implement my own version of the DataCatalog that uses this model +class DataCatalogWrapper(Data): + """ + Custom Data class for handling dataset operations. + + Parameters + ---------- + df: pd.DataFrame + The input dataframe containing the dataset. + continuous: List[str] + List of continuous feature names. + categorical: List[str] + List of categorical feature names. + immutable: List[str] + List of immutable feature names. + target: str + The target variable name. + + Returns + ------- + DataTemp + """ + + def __init__( + self, + df_name: str, + X_train: pd.DataFrame, + X_test: pd.DataFrame, + y_train: pd.Series, + y_test: pd.Series, + continuous: List[str], + categorical: List[str], + immutable: List[str], + transformer: Transformer, + target: str, + ): + # self._df = df.copy() + self._continuous = continuous + self._categorical = categorical + self._immutable = immutable + self._target = target + # create train/test split + # for simplicity, we will just do a simple split here + + X_train = transformer.transform(X_train) + X_test = transformer.transform(X_test) + + # dataset_obj = pd.concat([X_train, X_test], ignore_index=True) + # output_merge = pd.concat([y_train, y_test], ignore_index=True) + # dataset_obj["y"] = output_merge + X_train = pd.DataFrame(X_train) + X_test = pd.DataFrame(X_test) + + X_train["y"] = y_train.values + X_test["y"] = y_test.values + + self._df_train = X_train + self._df_test = X_test + + @property + def categorical(self) -> List[str]: + """ + Provides the column names of categorical data. 
+ Column names do not contain encoded information as provided by a get_dummy() method (e.g., sex_female) + + Label name is not included. + + Returns + ------- + list of Strings + List of all categorical columns + """ + return self._categorical + + @property + def continuous(self) -> List[str]: + """ + Provides the column names of continuous data. + + Label name is not included. + + Returns + ------- + list of Strings + List of all continuous columns + """ + return self._continuous + + @property + def df(self) -> pd.DataFrame: + """ + The full Dataframe. + + Returns + ------- + pd.DataFrame + """ + return None + + @property + def df_train(self) -> pd.DataFrame: + """ + The training split Dataframe. + + Returns + ------- + pd.DataFrame + """ + return self._df_train.copy() + + @property + def df_test(self) -> pd.DataFrame: + """ + The testing split Dataframe. + + Returns + ------- + pd.DataFrame + """ + return self._df_test.copy() + + @property + def immutables(self) -> Union[List[str], None]: + """ + Provides the column names of immutable data. + + Label name is not included. + + Returns + ------- + list of Strings + List of all immutable columns + """ + return None + + @property + def target(self) -> str: + """ + Provides the name of the label column. + + Returns + ------- + str + Target label name + """ + return self._target + + def transform(self, df: pd.DataFrame) -> pd.DataFrame: + """ + Transforms input for prediction into correct form. + Only possible for DataFrames without preprocessing steps. + + Recommended to keep correct encodings and normalization + + Parameters + ---------- + df : pd.DataFrame + Contains raw (not normalized and not encoded) data. + + Returns + ------- + output : pd.DataFrame + Prediction input normalized and encoded + + """ + output = df.copy() + return output + + def inverse_transform(self, df: pd.DataFrame) -> pd.DataFrame: + """ + Transforms output after prediction back into original form. 
+ Only possible for DataFrames with preprocessing steps. + + Parameters + ---------- + df : pd.DataFrame + Contains normalized and encoded data. + + Returns + ------- + output : pd.DataFrame + Prediction output denormalized and decoded + + """ + output = df.copy() + return output + + +# Make my own version of ModelCatalog that uses this model +class ModelCatalogWrapper(MLModel): + """ + Use pretrained classifier. + + Parameters + ---------- + data : data.catalog.DataCatalog Class + Correct dataset for ML model. + + Methods + ------- + predict: + One-dimensional prediction of ml model for an output interval of [0, 1]. + predict_proba: + Two-dimensional probability prediction of ml model + + Returns + ------- + None + """ + + def __init__( + self, + data: Data, + # train_data: pd.DataFrame, + model_type: str = "mlp", + backend: str = "pytorch", + # model_type: str, # we are just using the mlp for this paper + **kws, + ) -> None: + """ + Constructor for pretrained ML models from the catalog. + + Possible backends are currently "pytorch", "tensorflow" for "ann" and "linear" models. + Possible backends are currently "sklearn", "xgboost" for "forest" models. + + """ + super().__init__(data) + self._model_type = model_type + self._backend = backend + + self._continuous = data.continuous + self._categorical = data.categorical + + self._feature_input_order = data.df_train.drop( + columns=[data.target] + ).columns.tolist() + + self._model = PyTorchNeuralNetworkWrapper( + n_inputs=len(self._feature_input_order) + ) + + if self.backend == "pytorch": + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + self._model = self._model.to(device) + + tmp_text = ( + f"Training {self._model_type} model using {self._backend} backend on device {device}" + + f" with {len(data.df_train)} training samples."
+ ) + print(tmp_text) + + # kf = KFold(n_splits=5, shuffle=True, random_state=42) + + # for i, (train_index, cross_index) in enumerate(kf.split(data.df_train.drop(columns=[data.target]))): + # # Get the training data for this fold + # X_training = data.df_train[train_index].drop(columns=[data.target]) + # y_training = data.df_train[train_index][data.target] + + self._model = self._model.fit( + x_train=data.df_train.drop(columns=[data.target]), + y_train=data.df_train[data.target], + ) + + def _test_accuracy(self): + # get preprocessed data + df_test = self.data.df_test + + x_test = df_test[list(set(df_test.columns) - {self.data.target})] + y_test = df_test[self.data.target] + + prediction = (self.predict(x_test) > 0.5).flatten() + correct = prediction == y_test + print(f"test accuracy for model: {correct.mean()}") + + @property + def feature_input_order(self) -> List[str]: + """ + Saves the required order of feature as list. + + Prevents confusion about correct order of input features in evaluation + + Returns + ------- + ordered_features : list of str + Correct order of input features for ml model + """ + return self._feature_input_order + + @property + def model_type(self) -> str: + """ + Describes the model type + + E.g., ann, linear + + Returns + ------- + backend : str + model type + """ + return self._model_type + + @property + def backend(self) -> str: + """ + Describes the type of backend which is used for the ml model. + + E.g., tensorflow, pytorch, sklearn, ... 
+ + Returns + ------- + backend : str + Used framework + """ + return self._backend + + @property + def raw_model(self) -> Any: + """ + Returns the raw ML model built on its framework + + Returns + ------- + ml_model : tensorflow, pytorch, sklearn model type + Loaded model + """ + return self._model + + def predict( + self, x: Union[np.ndarray, pd.DataFrame, torch.Tensor, tf.Tensor] + ) -> Union[np.ndarray, pd.DataFrame, torch.Tensor, tf.Tensor]: + """ + One-dimensional prediction of ml model for an output interval of [0, 1] + + Shape of input dimension has to be always two-dimensional (e.g., (1, m), (n, m)) + + Parameters + ---------- + x : np.Array, pd.DataFrame, or backend specific (tensorflow or pytorch tensor) + Tabular data of shape N x M (N number of instances, M number of features) + + Returns + ------- + output : np.ndarray, or backend specific (tensorflow or pytorch tensor) + Ml model prediction for interval [0, 1] with shape N x 1 + """ + + if len(x.shape) != 2: + raise ValueError( + "Input shape has to be two-dimensional, (instances, features)." + ) + + if self._backend == "pytorch": + return self.predict_proba(x)[:, 1].reshape((-1, 1)) + elif self._backend == "tensorflow": + # keep output in shape N x 1 + # order data (column-wise) before prediction + x = self.get_ordered_features(x) + return self._model.predict(x)[:, 1].reshape((-1, 1)) + elif self._backend == "sklearn" or self._backend == "xgboost": + return self._model.predict(self.get_ordered_features(x)) + else: + raise ValueError( + 'Incorrect backend value. Please use only "pytorch" or "tensorflow".' 
+ ) + + def predict_proba( + self, x: Union[np.ndarray, pd.DataFrame, torch.Tensor, tf.Tensor] + ) -> Union[np.ndarray, pd.DataFrame, torch.Tensor, tf.Tensor]: + """ + Two-dimensional probability prediction of ml model + + Shape of input dimension has to be always two-dimensional (e.g., (1, m), (n, m)) + + Parameters + ---------- + x : np.Array, pd.DataFrame, or backend specific (tensorflow or pytorch tensor) + Tabular data of shape N x M (N number of instances, M number of features) + + Returns + ------- + output : np.ndarray, or backend specific (tensorflow or pytorch tensor) + Ml model prediction with shape N x 2 + """ + + # order data (column-wise) before prediction + x = self.get_ordered_features(x) + + if len(x.shape) != 2: + raise ValueError("Input shape has to be two-dimensional") + + if self._backend == "pytorch": + # Keep model and input on the same device + device = "cuda" if torch.cuda.is_available() else "cpu" + self._model = self._model.to(device) + + if isinstance(x, pd.DataFrame): + _x = x.values + elif isinstance(x, torch.Tensor): + _x = x.clone() + else: + _x = x.copy() + + # If the input was a tensor, return a tensor. Else return a np array. + tensor_output = torch.is_tensor(x) + if not tensor_output: + _x = torch.Tensor(_x) + + # input, tensor_output = ( + # (torch.Tensor(x), False) if not torch.is_tensor(x) else (x, True) + # ) + + _x = _x.to(device) + output = self._model.predict(_x) + + if tensor_output: + return output + else: + return output.detach().cpu().numpy() + + elif self._backend == "tensorflow": + return self._model.predict(x) + elif self._backend == "sklearn" or self._backend == "xgboost": + return self._model.predict_proba(x) + else: + raise ValueError( + 'Incorrect backend value. Please use only "pytorch" or "tensorflow".' 
+ ) diff --git a/methods/catalog/rbr/model.py b/methods/catalog/rbr/model.py new file mode 100644 index 0000000..8ab1e25 --- /dev/null +++ b/methods/catalog/rbr/model.py @@ -0,0 +1,164 @@ +from typing import Dict, Optional + +import numpy as np +import pandas as pd + +from methods.catalog.rbr.library.rbr_loss import robust_bayesian_recourse +from methods.processing import check_counterfactuals +from methods.processing.counterfactuals import merge_default_parameters +from models.catalog.catalog import ModelCatalog + +from ...api import RecourseMethod + +RANDOM_SEED = 54321 + + +class RBR(RecourseMethod): + """ + Implementation of Robust Bayesian Recourse [1]_. + + Parameters + ---------- + mlmodel : model.MLModel + Black-Box-Model + hyperparams : dict + Dictionary containing hyperparameters. See Notes below to see its content. + + Methods + ------- + get_counterfactuals: + Generate counterfactual examples for given factuals. + + Notes + ----- + - Hyperparams + Hyperparameter contains important information for the recourse method to initialize. + Please make sure to pass all values as dict with the following keys. + + * "num_samples": int, default: 200 + Number of samples around factual instance. + * "perturb_radius": float, default: 0.2 + Radius for perturbation around factual instance. + * "delta_plus": float, default: 1.0 + Tolerance for cost of recourse (l1). + * "sigma": float, default: 1.0 + Standard deviation for Gaussian noise. + * "epsilon_op": float, default: 1.0 + Tolerance parameter for optimistic likelihood. + * "epsilon_pe": float, default: 1.0 + Tolerance parameter for pessimistic likelihood. + * "max_iter": int, default: 500 + Maximum number of iterations. + * "device": str, default: "cpu" + Device to run computations on ("cpu" or "cuda"). + * "clamp": bool, default: False + Whether to clamp final values to [0,1] range.
+ * "train_data": array-like, default: None + Training data used to find boundry point X_b + * "reproduce": bool, default: False + Strictly for reproducibility tests. + + - Restrictions + * Requires training data to be provided. There must be instances of target class in the training data. + * This method only works with mlp. Not guaranteed to get counterfactual. + + .. [1] Nguyen, Tuan-Duy Hien, Ngoc Bui, Duy Nguyen, Man-Chung Yue, and Viet Anh Nguyen. 2022. + "Robust Bayesian Recourse." (UAI 2022) + """ + + _DEFAULT_HYPERPARAMS = { + "num_samples": 200, + "perturb_radius": 0.2, + "delta_plus": 1.0, + "sigma": 1.0, + "epsilon_op": 1.0, + "epsilon_pe": 1.0, + "max_iter": 500, + "device": "cpu", + "clamp": False, + "train_data": None, + "reproduce": False, + } + + def __init__(self, mlmodel: ModelCatalog, hyperparams: Optional[Dict] = None): + supported_backends = ["pytorch"] + if mlmodel.backend not in supported_backends: + raise ValueError( + f"{mlmodel.backend} not supported (RBR supports: {supported_backends})" + ) + + super().__init__(mlmodel) + checked = merge_default_parameters(hyperparams, self._DEFAULT_HYPERPARAMS) + self._num_samples = checked["num_samples"] + self._perturb_radius = checked["perturb_radius"] + self._delta_plus = checked["delta_plus"] + self._sigma = checked["sigma"] + self._epsilon_op = checked["epsilon_op"] + self._epsilon_pe = checked["epsilon_pe"] + self._max_iter = checked["max_iter"] + self._device = checked["device"] + self._clamp = checked["clamp"] + self._train_data = checked["train_data"] + self._reproduce = checked["reproduce"] + + def get_counterfactuals(self, factuals: pd.DataFrame) -> pd.DataFrame: + factuals = factuals.reset_index() + factuals = self._mlmodel.get_ordered_features(factuals) + + # print(factuals) + + # categorical encoded feature indices (if MLModel provides list) + # encoded_feature_names = self._mlmodel.data.categorical + # cat_features_indices = [factuals.columns.get_loc(f) for f in encoded_feature_names] if 
len(encoded_feature_names) > 0 else None + + train_data = self._train_data + if train_data is None: + raise ValueError( + "RBR needs training data available via hyperparams['train_data']" + ) + + # ensure numpy arrays: + if isinstance(train_data, pd.DataFrame): + train_np = train_data.values + else: + train_np = train_data + + def apply_rbr(x_row): + x_np = x_row.reshape((1, -1)).astype(float) + # print(f"x_np: {type(x_np)}") + cf = robust_bayesian_recourse( + self._mlmodel.raw_model, + x_np.squeeze(), + # cat_features_indices=cat_features_indices, + train_data=train_np, + num_samples=self._num_samples, + perturb_radius=self._perturb_radius, + delta_plus=self._delta_plus, + sigma=self._sigma, + epsilon_op=self._epsilon_op, + epsilon_pe=self._epsilon_pe, + max_iter=self._max_iter, + dev=self._device, + random_state=RANDOM_SEED, + verbose=False, + ) + # optional final clamp (0,1) if requested + # print(f"cf before clamp: {cf}") + if self._clamp: + cf = cf.clip(0.0, 1.0) + return cf + + df_cfs = factuals.apply(lambda row: apply_rbr(row), raw=True, axis=1) + if self._reproduce is True: + # print(f"Print predection since the model we are using is returning a single value: {self._mlmodel.predict_proba(df_cfs)}") + # print("If the above value is over 50, the be passed, regardless of the bottom failure.") + df_cfs[self._mlmodel.data.target] = ( + 1 if self._mlmodel.predict_proba(df_cfs).flatten()[0] >= 0.5 else 0 + ) + df_cfs.loc[df_cfs[self._mlmodel.data.target] == 0, :] = np.nan + else: + df_cfs = np.array(df_cfs) + df_cfs = pd.DataFrame(df_cfs, columns=self._mlmodel.feature_input_order) + df_cfs = check_counterfactuals(self._mlmodel, df_cfs, factuals.index) + df_cfs = self._mlmodel.get_ordered_features(df_cfs) + return df_cfs diff --git a/methods/catalog/rbr/reproduce.py b/methods/catalog/rbr/reproduce.py new file mode 100644 index 0000000..c0344a5 --- /dev/null +++ b/methods/catalog/rbr/reproduce.py @@ -0,0 +1,354 @@ +from collections import namedtuple + +import numpy 
as np +import pandas as pd +import pytest +import torch +from sklearn.model_selection import train_test_split + +from data.catalog.online_catalog import DataCatalog +from methods.catalog.rbr.library.reproduce.utils_general import get_transformer +from methods.catalog.rbr.library.reproduce.utils_reproduce import ( + DataCatalogWrapper, + ModelCatalogWrapper, +) +from methods.catalog.rbr.model import RBR +from models.catalog.catalog import ModelCatalog + +from ...api import RecourseMethod + +RANDOM_SEED = 54321 + +RecourseResult = namedtuple( + "RecourseResults", ["l1_cost", "cur_valid", "fut_valid", "feasible"] +) + + +def lp_dist(x, y, p=2): + return np.linalg.norm(x - y, ord=p) + + +def calc_future_validity(x, shifted_models): + preds = [] + for model in shifted_models: + pred = model.raw_model.predict(x) + pred = pred.detach().cpu().numpy() if torch.is_tensor(pred) else pred + pred = 1 if pred >= 0.5 else 0 + preds.append(pred) + preds = np.array(preds) + return np.mean(preds) + + +def run_single_instance( + idx: int, + method_object: RecourseMethod, # we will only use RBR + x0_numpy: np.ndarray, + x0_df: pd.DataFrame, + shifted_models: list, +): + """ + Runs recourse on a single instance using the implemented RBR method + """ + counterfactual_df = method_object.get_counterfactuals(x0_df) + + # print(f"Counterfactual: {counterfactual_df}") + + if counterfactual_df.empty: + print(f"error for {idx}: no counterfactual found") + return RecourseResult(np.inf, 0, 0, False) + + counterfactual_df = method_object._mlmodel.get_ordered_features(counterfactual_df) + + # print(f"Counterfactual after get feature order: {counterfactual_df}") + + x_cf_numpy = counterfactual_df.iloc[0].to_numpy() + + print(f"x0_numpy: {x0_numpy}, x_cf_numpy: {x_cf_numpy}") + + # l1 cost + l1_cost = lp_dist(x0_numpy, x_cf_numpy, p=1) + + cf_tensor = torch.from_numpy(counterfactual_df.values.astype(np.float32)) + + # current validity + cur_valid = method_object._mlmodel.raw_model.predict(cf_tensor) 
+ cur_valid = 1 if cur_valid >= 0.5 else 0 + + # future validity + # fut_valid = calc_future_validity(x_cf_numpy, shifted_models) + fut_valid = calc_future_validity(cf_tensor, shifted_models) + + return RecourseResult(l1_cost, float(cur_valid), fut_valid, True) + + +@pytest.mark.parametrize( + "dataset_name, model_type, backend", + [ + ("german", "mlp", "pytorch"), + ], +) +def test_rbr(dataset_name, model_type, backend): + + np.random.seed(RANDOM_SEED) + torch.manual_seed(RANDOM_SEED) + + # load the csv as a pandas DataFrame + dataset = pd.read_csv( + f"methods/catalog/rbr/library/reproduce/data/{dataset_name}.csv" + ) + dataset_shifted = pd.read_csv( + f"methods/catalog/rbr/library/reproduce/data/{dataset_name}_modified.csv" + ) + + num_feat = ["duration", "amount", "age"] + cat_feat = ["personal_status_sex"] + target = "credit_risk" + + df1 = dataset.drop( + columns=[c for c in list(dataset) if c not in num_feat + cat_feat + [target]] + ) + temp = dataset_shifted.drop( + columns=[ + c for c in list(dataset_shifted) if c not in num_feat + cat_feat + [target] + ] + ) + + df1.rename(columns={"credit_risk": "y"}, inplace=True) + temp.rename(columns={"credit_risk": "y"}, inplace=True) + + X = df1.drop(columns=["y"]) + y = df1["y"] + + X_temp = temp.drop(columns=["y"]) + y_temp = temp["y"] + + X_train, X_test, y_train, y_test = train_test_split( + X, y, train_size=0.8, random_state=42, stratify=y + ) + + combined_df = pd.concat([df1, temp], ignore_index=True) + + transformer = get_transformer(dataset_name, combined_df.copy()) + + dataset_org = DataCatalogWrapper( + df_name=dataset_name, + X_train=X_train, + X_test=X_test, + y_train=y_train, + y_test=y_test, + continuous=num_feat, + categorical=cat_feat, + immutable=[], + transformer=transformer, + target="y", + ) + + X_train_temp, _, y_train_temp, _ = train_test_split( + X_temp, y_temp, train_size=0.5, random_state=1, stratify=y_temp + ) + future_X = pd.concat([X_train, X_train_temp], ignore_index=True) + future_y = 
pd.concat([y_train, y_train_temp], ignore_index=True) + dataset_shifted_1 = DataCatalogWrapper( + df_name=dataset_name + "_modified", + X_train=future_X, + X_test=X_test, + y_train=future_y, + y_test=y_test, + continuous=num_feat, + categorical=cat_feat, + immutable=[], + transformer=transformer, + target="y", + ) + + X_train_temp, _, y_train_temp, _ = train_test_split( + X_temp, y_temp, train_size=0.5, random_state=2, stratify=y_temp + ) + future_X = pd.concat([X_train, X_train_temp], ignore_index=True) + future_y = pd.concat([y_train, y_train_temp], ignore_index=True) + dataset_shifted_2 = DataCatalogWrapper( + df_name=dataset_name + "_modified", + X_train=future_X, + X_test=X_test, + y_train=future_y, + y_test=y_test, + continuous=num_feat, + categorical=cat_feat, + immutable=[], + transformer=transformer, + target="y", + ) + + X_train_temp, _, y_train_temp, _ = train_test_split( + X_temp, y_temp, train_size=0.5, random_state=3, stratify=y_temp + ) + future_X = pd.concat([X_train, X_train_temp], ignore_index=True) + future_y = pd.concat([y_train, y_train_temp], ignore_index=True) + dataset_shifted_3 = DataCatalogWrapper( + df_name=dataset_name + "_modified", + X_train=future_X, + X_test=X_test, + y_train=future_y, + y_test=y_test, + continuous=num_feat, + categorical=cat_feat, + immutable=[], + transformer=transformer, + target="y", + ) + + X_train_temp, _, y_train_temp, _ = train_test_split( + X_temp, y_temp, train_size=0.5, random_state=4, stratify=y_temp + ) + future_X = pd.concat([X_train, X_train_temp], ignore_index=True) + future_y = pd.concat([y_train, y_train_temp], ignore_index=True) + dataset_shifted_4 = DataCatalogWrapper( + df_name=dataset_name + "_modified", + X_train=future_X, + X_test=X_test, + y_train=future_y, + y_test=y_test, + continuous=num_feat, + categorical=cat_feat, + immutable=[], + transformer=transformer, + target="y", + ) + + X_train_temp, _, y_train_temp, _ = train_test_split( + X_temp, y_temp, train_size=0.5, random_state=5, 
stratify=y_temp + ) + future_X = pd.concat([X_train, X_train_temp], ignore_index=True) + future_y = pd.concat([y_train, y_train_temp], ignore_index=True) + dataset_shifted_5 = DataCatalogWrapper( + df_name=dataset_name + "_modified", + X_train=future_X, + X_test=X_test, + y_train=future_y, + y_test=y_test, + continuous=num_feat, + categorical=cat_feat, + immutable=[], + transformer=transformer, + target="y", + ) + + # load the dataset and model + # these are temporary classes for testing/reproducing + # dataset_shifted_2 = DataCatalogWrapper(df_name=dataset_name+'_modified', df=df3, continuous=num_feat, categorical=cat_feat, immutable=[], transformer=transformer, target='y') + # dataset_shifted_3 = DataCatalogWrapper(df_name=dataset_name+'_modified', df=df4, continuous=num_feat, categorical=cat_feat, immutable=[], transformer=transformer, target='y') + # dataset_shifted_4 = DataCatalogWrapper(df_name=dataset_name+'_modified', df=df5, continuous=num_feat, categorical=cat_feat, immutable=[], transformer=transformer, target='y') + + # df = dataset.df() + # X_df = df.drop(columns=['y'], axis=1) + # y_s = df['y'] + + model = ModelCatalogWrapper( + data=dataset_org, model_type=model_type, backend=backend + ) # these are temporary classes for testing/reproducing + + model_shifted_1 = ModelCatalogWrapper( + dataset_shifted_1, model_type, backend + ) # these are temporary classes for testing/reproducing + model_shifted_2 = ModelCatalogWrapper( + dataset_shifted_2, model_type, backend + ) # these are temporary classes for testing/reproducing + model_shifted_3 = ModelCatalogWrapper( + dataset_shifted_3, model_type, backend + ) # these are temporary classes for testing/reproducing + model_shifted_4 = ModelCatalogWrapper( + dataset_shifted_4, model_type, backend + ) # these are temporary classes for testing/reproducing + model_shifted_5 = ModelCatalogWrapper(dataset_shifted_5, model_type, backend) + # model._test_accuracy() + + device = torch.device("cuda" if 
torch.cuda.is_available() else "cpu") + rbr = RBR( + model, + hyperparams={ + "device": device, + "train_data": dataset_org.df_train.drop(columns=["y"], axis=1), + "reproduce": True, + }, + ) + + real_x_test = dataset_org.df_test.drop(columns=["y"], axis=1) + # print(real_x_test[:10]) + # y_test = dataset.df_test['y'] + + # X_test = X_test[y_test == 0] # only negative class + # want a few samples that the original model classifies as negative + preds_test = model.raw_model.predict( + torch.from_numpy(real_x_test.values.astype(np.float32)) + ) + # print(f"Predictions on test set: {preds_test.flatten()}") + mask = (preds_test.flatten() < 0.5).detach().cpu().numpy() + real_x_test = real_x_test[mask] + + n = 10 # X_test.shape[0] + + factuals = real_x_test.sample(n=n, random_state=RANDOM_SEED) + + running_current_val = 0 + running_future_val = 0 + running_cost = 0 + + for idx in range(len(factuals)): + x0_df = factuals.iloc[[idx]] + x0_numpy = x0_df.to_numpy() + + result = run_single_instance( + idx, + rbr, + x0_numpy, + x0_df, + shifted_models=[ + model_shifted_1, + model_shifted_2, + model_shifted_3, + model_shifted_4, + model_shifted_5, + ], + ) + running_current_val += result.cur_valid + running_future_val += result.fut_valid + running_cost += result.l1_cost + print( + f"Instance {idx}: L1 cost = {result.l1_cost}, Current Validity = {result.cur_valid}, Future Validity = {result.fut_valid}, Feasible = {result.feasible}" + ) + print( + f"Average: L1 cost = {running_cost/n}, Current Validity = {running_current_val/n}, Future Validity = {running_future_val/n}" + ) + + assert running_current_val / n >= 0.9 + assert running_future_val / n >= 0.7 + + +def sanity_check(dataset_name, model_type, backend): + dataset = DataCatalog(dataset_name, model_type, 0.8) + + # load artificial neural network from catalog + model = ModelCatalog(dataset, model_type, backend) + + # get factuals from the data to generate counterfactual examples + factuals = (dataset._df_train).sample(n=5, 
random_state=RANDOM_SEED) + + # load a recourse model and pass black box model + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + rbr = RBR( + model, + hyperparams={ + "device": device, + "train_data": dataset._df_train.drop(columns=["y"], axis=1), + "reproduce": False, + }, + ) + + # generate counterfactual examples + counterfactuals = rbr.get_counterfactuals(factuals) + print(counterfactuals) + + +if __name__ == "__main__": + test_rbr("german", "mlp", "pytorch") + # sanity_check("german", "mlp", "pytorch")