diff --git a/exports/observation.csv b/exports/observation.csv
deleted file mode 100644
index 31812ae..0000000
--- a/exports/observation.csv
+++ /dev/null
@@ -1,511 +0,0 @@
-observation_id,person_id,observation_concept_id,observation_date,observation_datetime,observation_type_concept_id,value_as_number,value_as_string,value_as_concept_id,qualifier_concept_id,unit_concept_id,provider_id,visit_occurrence_id,observation_source_value,observation_source_concept_id,unit_source_value,qualifier_source_value,value_source_concept_id,value_source_value,questionnaire_response_id
-118208,5557,9425,3823,10549,1331,,16669.0,,,,,8936.0,11839,6242,1849.0,5535.0,,,
-112221,5557,1268,8176,4688,1331,,16669.0,,,,,9908.0,7436,6037,1849.0,5535.0,,,
-92924,5557,1268,3823,13501,1331,,16669.0,,,,,8043.0,7436,6037,1849.0,5535.0,,,
-87525,5557,562,6238,5732,1331,,16669.0,,,,,2539.0,3555,914,1849.0,5535.0,,,
-88732,5557,1268,3823,10549,1331,,16669.0,,,,,8936.0,7436,6037,1849.0,5535.0,,,
-127541,5230,255,4070,17510,1331,,,5490.0,,,,8823.0,10678,7672,,,,,
-143650,665,5987,6705,10269,1331,,,5490.0,,,,6992.0,11454,8747,,,,,
-69801,665,8220,11554,17750,1331,,,5490.0,,,,6910.0,4645,7332,,,,,
-102810,665,3637,1222,15887,1331,,,5490.0,,,,5139.0,5309,5692,,,,,
-143746,665,8499,3363,666,1331,,,5490.0,,,,7948.0,5963,2112,,,,,
-70261,665,7654,11258,18677,1331,,,5490.0,,,,12665.0,6282,333,,,,,
-40451,665,3637,1330,6520,1331,,,5490.0,,,,5793.0,6548,7716,,,,,
-20543,665,6866,9228,15133,1331,,,5490.0,,,,9252.0,11880,1768,,,,,
-100742,665,5987,11806,11363,1331,,,5490.0,,,,,4102,192,,,,,
-128493,665,3637,9520,13609,1331,,,5490.0,,,,8390.0,5309,5692,,,,,
-118347,665,4084,11258,18677,1331,,,5490.0,,,,12665.0,1637,10082,,,,,
-70737,665,9675,6995,21994,1331,,,5490.0,,,,8156.0,11592,3413,,,,,
-16780,9034,2273,8988,12680,1331,,,5490.0,,,,11630.0,7739,5531,,,,,
-48409,9034,2273,8530,10717,1331,,,5490.0,,,,2681.0,7739,5531,,,,,
-95301,665,2123,3877,22319,1331,,,5490.0,,,,12932.0,4701,11099,,,,,
-109187,665,8499,586,20186,1331,,,5490.0,,,,13320.0,5963,2112,,,,,
-131936,665,3637,3019,1916,1331,,,5490.0,,,,326.0,5309,5692,,,,,
-11545,665,9675,11258,18677,1331,,,5490.0,,,,12665.0,229,9044,,,,,
-45240,665,9675,5659,7261,1331,,,5490.0,,,,3579.0,3906,3214,,,,,
-94641,9034,2123,8988,12680,1331,,,5490.0,,,,11630.0,9951,1400,,,,,
-61317,665,8499,12397,21443,1331,,,5490.0,,,,7050.0,5963,2112,,,,,
-96121,665,5987,11258,18677,1331,,,5490.0,,,,12665.0,271,2597,,,,,
-141495,9034,2273,4972,4053,1331,,,5490.0,,,,10150.0,7739,5531,,,,,
-46013,665,7230,4975,8765,1331,,,5490.0,,,,8632.0,7140,9976,,,,,
-27580,665,6866,10230,22399,1331,,,5490.0,,,,11228.0,11880,1768,,,,,
-31663,665,1004,6701,19159,1331,,,5490.0,,,,12712.0,6220,7308,,,,,
-53487,665,9425,9168,23137,1331,,,5490.0,,,,3789.0,954,6242,,,,,
-96920,665,8499,7684,24162,1331,,,5490.0,,,,2125.0,6421,2112,,,,,
-140655,665,566,11289,23244,1331,,,5490.0,,,,8937.0,496,7067,,,,,
-22671,665,3637,7814,14867,1331,,,5490.0,,,,3367.0,5309,5692,,,,,
-73299,665,5987,11801,10623,1331,,,5490.0,,,,9541.0,8239,8155,,,,,
-33464,9034,5772,8988,12680,1331,,,5490.0,,,,11630.0,5293,3180,,,,,
-116430,665,8499,9520,13609,1331,,,5490.0,,,,8390.0,6421,2112,,,,,
-42612,665,566,11258,18677,1331,,,5490.0,,,,12665.0,496,7067,,,,,
-151800,665,2123,6995,21994,1331,,,5490.0,,,,8156.0,8751,11099,,,,,
-66963,665,5987,757,20626,1331,,,5490.0,,,,2315.0,11454,8747,,,,,
-20955,665,9675,757,20626,1331,,,5490.0,,,,2315.0,229,9044,,,,,
-29389,665,6866,9591,9107,1331,,,5490.0,,,,8111.0,11880,1768,,,,,
-47723,665,8881,3877,22319,1331,,,5490.0,,,,12932.0,9490,836,,,,,
-145483,665,8220,11289,23244,1331,,,5490.0,,,,8937.0,4645,7332,,,,,
-148716,665,5987,1387,17146,1331,,,5490.0,,,,1679.0,4102,192,,,,,
-60966,665,6866,6217,3474,1331,,,5490.0,,,,7540.0,11880,1768,,,,,
-74964,665,8220,6705,10269,1331,,,5490.0,,,,6992.0,4645,7332,,,,,
-111786,665,566,757,20626,1331,,,5490.0,,,,2315.0,496,7067,,,,,
-41338,665,9675,10799,349,1331,,,5490.0,,,,3164.0,5538,9044,,,,,
-27522,665,9675,6705,10269,1331,,,5490.0,,,,6992.0,229,9044,,,,,
-64564,665,3637,8188,3756,1331,,,5490.0,,,,3488.0,4974,7184,,,,,
-69978,665,7565,6701,19159,1331,,,5490.0,,,,12712.0,293,5494,,,,,
-34968,665,3637,7684,24162,1331,,,5490.0,,,,2125.0,5309,5692,,,,,
-23218,665,3637,245,13206,1331,,,5490.0,,,,,5309,5692,,,,,
-71119,665,8881,2149,3205,1331,,,5490.0,,,,6073.0,9490,836,,,,,
-66535,665,566,6705,10269,1331,,,5490.0,,,,6992.0,496,7067,,,,,
-65054,665,11121,11258,18677,1331,,,5490.0,,,,12665.0,4843,5773,,,,,
-74272,665,566,11554,17750,1331,,,5490.0,,,,6910.0,496,7067,,,,,
-110821,665,9675,2960,7987,1331,,,5490.0,,,,3465.0,3906,3214,,,,,
-146780,665,3637,12397,21443,1331,,,5490.0,,,,7050.0,6289,5692,,,,,
-53661,665,5987,11289,23244,1331,,,5490.0,,,,8937.0,6804,10990,,,,,
-115297,9034,3637,10182,380,1331,,,5490.0,,,,9044.0,11463,7184,,,,,
-88141,665,8220,11258,18677,1331,,,5490.0,,,,12665.0,4645,7332,,,,,
-108101,665,5987,6705,10269,1331,,,5490.0,,,,6992.0,271,2597,,,,,
-152503,665,5987,11554,17750,1331,,,5490.0,,,,6910.0,271,2597,,,,,
-88754,665,5987,2960,7987,1331,,,5490.0,,,,3465.0,271,2597,,,,,
-17476,9034,2273,12445,8047,1331,,,5490.0,,,,13143.0,7739,5531,,,,,
-1955,665,3637,3363,666,1331,,,5490.0,,,,7948.0,6289,5692,,,,,
-118939,665,3637,10248,5700,1331,,,5490.0,,,,5350.0,6289,5692,,,,,
-115691,665,5987,7754,13286,1331,,,5490.0,,,,13355.0,4102,192,,,,,
-111900,665,5987,11554,17750,1331,,,5490.0,,,,6910.0,6804,10990,,,,,
-99133,665,4351,11554,17750,1331,,,5490.0,,,,6910.0,3158,3294,,,,,
-33439,665,4351,10248,5700,1331,,,5490.0,,,,5350.0,10401,2940,,,,,
-76750,665,4351,11258,18677,1331,,,5490.0,,,,12665.0,3955,4266,,,,,
-101677,665,4351,11554,17750,1331,,,5490.0,,,,6910.0,3955,4266,,,,,
-7658,665,4351,757,20626,1331,,,5490.0,,,,2315.0,7938,758,,,,,
-8349,665,4351,11289,23244,1331,,,5490.0,,,,8937.0,3955,4266,,,,,
-83441,665,4351,2960,7987,1331,,,5490.0,,,,3465.0,3955,4266,,,,,
-31987,665,4351,757,20626,1331,,,5490.0,,,,2315.0,3955,4266,,,,,
-149415,7552,10002,9293,1136,1331,,,5490.0,,,,13107.0,9301,4502,,,,,
-109005,889,3080,2677,11631,1331,,,5490.0,,,,,12081,1503,,,,,
-51503,889,255,1957,19548,1331,,,5490.0,,,,,10678,7672,,,,,
-92084,584,8881,933,23961,1331,,,5490.0,,,,5399.0,1933,836,,,,,
-20482,584,8220,286,19980,1331,,,5490.0,,,,9158.0,2464,7332,,,,,
-142288,584,3637,6366,21753,1331,,,5490.0,,,,,6548,7716,,,,,
-26495,584,8220,7954,2585,1331,,,5490.0,,,,9965.0,2464,7332,,,,,
-44120,889,5772,12734,19047,1331,,,5490.0,,,,,4378,10734,,,,,
-25470,584,8220,6297,10755,1331,,,5490.0,,,,9721.0,2464,7332,,,,,
-55155,584,8220,12707,5062,1331,,,5490.0,,,,10641.0,2464,7332,,,,,
-59236,584,3228,8590,11654,1331,,,5490.0,,,,11500.0,6818,10843,,,,,
-51768,584,7973,913,21672,1331,,,5490.0,,,,3441.0,7416,7132,,,,,
-46767,889,5987,1789,4216,1331,,,5490.0,,,,,4102,192,,,,,
-98773,584,8220,8901,6846,1331,,,5490.0,,,,1663.0,2464,7332,,,,,
-102018,889,1815,995,12664,1331,,,5490.0,,,,4139.0,7236,6845,,,,,
-16147,889,9425,3510,1739,1331,,,5490.0,,,,3252.0,2685,6242,,,,,
-37396,889,4351,4335,5449,1331,,,5490.0,,,,11376.0,2443,7969,,,,,
-110428,889,3637,8343,20660,1331,,,5490.0,,,,3918.0,5309,5692,,,,,
-11390,889,8609,2960,7987,1331,,,5490.0,,,,,8379,1288,,,,,
-18726,584,2123,2873,3314,1331,,,5490.0,,,,7270.0,9951,1400,,,,,
-8677,889,5987,2836,20839,1331,,,5490.0,,,,11801.0,4102,192,,,,,
-31211,889,5904,1957,19548,1331,,,5490.0,,,,,2790,3686,,,,,
-123923,584,8220,693,17394,1331,,,5490.0,,,,,2464,7332,,,,,
-88350,584,8220,1675,17892,1331,,,5490.0,,,,1614.0,2464,7332,,,,,
-130425,584,8220,11192,21322,1331,,,5490.0,,,,11314.0,4645,7332,,,,,
-17957,584,8220,913,21672,1331,,,5490.0,,,,3441.0,2464,7332,,,,,
-148169,889,4351,4162,20465,1331,,,5490.0,,,,8503.0,4435,7969,,,,,
-46432,889,9425,6584,13541,1331,,,5490.0,,,,6965.0,2685,6242,,,,,
-117012,889,4351,765,5507,1331,,,5490.0,,,,12148.0,1376,374,,,,,
-127289,889,1815,9018,16763,1331,,,5490.0,,,,9990.0,3899,6845,,,,,
-129839,889,8499,7312,24175,1331,,,5490.0,,,,9424.0,6421,2112,,,,,
-137161,889,3637,6416,9237,1331,,,5490.0,,,,1274.0,5309,5692,,,,,
-86281,889,5987,564,14434,1331,,,5490.0,,,,,4102,192,,,,,
-121552,889,9425,12206,19851,1331,,,5490.0,,,,10580.0,2685,6242,,,,,
-5523,584,8220,8187,8921,1331,,,5490.0,,,,5043.0,2464,7332,,,,,
-131839,584,8220,3520,5888,1331,,,5490.0,,,,10633.0,2464,7332,,,,,
-75866,584,2123,6883,20216,1331,,,5490.0,,,,6350.0,8751,11099,,,,,
-71809,889,5987,6911,21649,1331,,,5490.0,,,,1570.0,342,192,,,,,
-134463,889,10181,3070,14517,1331,,,5490.0,,,,2327.0,10757,6956,,,,,
-39004,584,8220,9583,4888,1331,,,5490.0,,,,5606.0,2464,7332,,,,,
-89639,889,3637,12206,19851,1331,,,5490.0,,,,10580.0,5309,5692,,,,,
-73506,8266,2704,2555,12270,1331,,,5490.0,,,,9705.0,9842,670,,,,,
-139715,2487,10289,8577,23785,1331,,,5490.0,,,,2909.0,11400,5264,,,,,
-92298,2487,10289,2836,20839,1331,,,5490.0,,,,12203.0,11400,5264,,,,,
-112860,2487,10289,5502,18949,1331,,,5490.0,,,,5401.0,11400,5264,,,,,
-94341,2487,10289,10960,23341,1331,,,5490.0,,,,12230.0,11400,5264,,,,,
-84661,2487,5987,8577,23785,1331,,,5490.0,,,,2909.0,11655,8155,,,,,
-127815,8266,2704,8418,22417,1331,,,5490.0,,,,5987.0,9842,670,,,,,
-114637,2487,9284,11406,21765,1331,,,5490.0,,,,8760.0,2051,9025,,,,,
-120662,8266,2734,2555,12270,1331,,,5490.0,,,,3356.0,6992,3891,,,,,
-77654,2487,10289,10543,11301,1331,,,5490.0,,,,5797.0,11400,5264,,,,,
-52859,2487,1941,865,18965,1331,,,5490.0,,,,1317.0,10167,10122,,,,,
-99667,2487,5987,10402,21256,1331,,,5490.0,,,,4198.0,11655,8155,,,,,
-39458,2487,10289,10402,21256,1331,,,5490.0,,,,4198.0,11400,5264,,,,,
-137598,2487,2123,7653,5315,1331,,,5490.0,,,,12212.0,8088,8147,,,,,
-42479,2487,5987,5502,18949,1331,,,5490.0,,,,5401.0,11655,8155,,,,,
-47641,2487,562,7653,5315,1331,,,5490.0,,,,12212.0,9598,914,,,,,
-138942,2487,6587,10960,23341,1331,,,5490.0,,,,12230.0,10651,6830,,,,,
-3496,2487,10289,12854,1260,1331,,,5490.0,,,,3740.0,11400,5264,,,,,
-66663,2487,3637,3502,7058,1331,,,5490.0,,,,7985.0,5309,5692,,,,,
-144416,2487,10289,6526,13869,1331,,,5490.0,,,,4204.0,11400,5264,,,,,
-150343,229,5248,8255,21747,1331,,,5490.0,,,,5302.0,507,1962,,,,,
-52099,229,5987,4310,21923,1331,,,5490.0,,,,7701.0,4102,192,,,,,
-121789,229,566,8255,21747,1331,,,5490.0,,,,5302.0,496,7067,,,,,
-52153,229,1004,4824,16715,1331,,,5490.0,,,,8138.0,10961,10789,,,,,
-95103,229,5987,8913,18400,1331,,,5490.0,,,,9117.0,4102,192,,,,,
-102328,229,6847,7047,20430,1331,,,5490.0,,,,3158.0,5605,8226,,,,,
-126774,229,7565,4824,16715,1331,,,5490.0,,,,8138.0,776,3975,,,,,
-127133,229,4351,8255,21747,1331,,,5490.0,,,,5302.0,9451,8766,,,,,
-69764,229,7565,7636,3074,1331,,,5490.0,,,,10449.0,293,5494,,,,,
-93154,5895,5987,8255,21747,1331,,,5490.0,,,,,4102,192,,,,,
-45928,229,5987,8255,21747,1331,,,5490.0,,,,5302.0,11454,8747,,,,,
-9543,229,5987,2633,5260,1331,,,5490.0,,,,7738.0,4102,192,,,,,
-41640,229,6810,8396,4809,1331,,,5490.0,,,,7303.0,471,4817,,,,,
-10654,229,5987,4014,8773,1331,,,5490.0,,,,8388.0,4102,192,,,,,
-16687,229,6810,5258,10596,1331,,,5490.0,,,,4544.0,471,4817,,,,,
-25069,229,6810,5534,7595,1331,,,5490.0,,,,7577.0,471,4817,,,,,
-137225,229,8598,8255,21747,1331,,,5490.0,,,,5302.0,1179,10074,,,,,
-88948,229,4351,8255,21747,1331,,,5490.0,,,,5302.0,7108,6713,,,,,
-39653,229,1004,8255,21747,1331,,,5490.0,,,,5302.0,854,4938,,,,,
-137353,229,5248,4824,16715,1331,,,5490.0,,,,8138.0,3288,5861,,,,,
-32231,229,9305,8255,21747,1331,,,5490.0,,,,5302.0,12050,8552,,,,,
-48748,229,6810,4619,11842,1331,,,5490.0,,,,,471,4817,,,,,
-24139,229,1004,4824,16715,1331,,,5490.0,,,,8138.0,854,4938,,,,,
-119203,229,6810,4824,16715,1331,,,5490.0,,,,8138.0,7162,4817,,,,,
-37449,229,5044,4824,16715,1331,,,5490.0,,,,8138.0,8452,1985,,,,,
-98305,229,5987,8255,21747,1331,,,5490.0,,,,5302.0,271,2597,,,,,
-106283,229,5987,564,14434,1331,,,5490.0,,,,316.0,4102,192,,,,,
-83001,229,1004,8255,21747,1331,,,5490.0,,,,5302.0,10961,10789,,,,,
-42771,229,5987,9913,1154,1331,,,5490.0,,,,4659.0,4102,192,,,,,
-139650,229,9305,7636,3074,1331,,,5490.0,,,,10449.0,6002,5633,,,,,
-105281,229,6810,12756,8578,1331,,,5490.0,,,,6332.0,471,4817,,,,,
-140570,229,3087,11118,13079,1331,,,5490.0,,,,9512.0,9062,3458,,,,,
-151531,5895,5987,11997,21650,1331,,,5490.0,,,,11501.0,4102,192,,,,,
-58777,229,1004,7636,3074,1331,,,5490.0,,,,10449.0,6220,7308,,,,,
-84286,229,1941,7636,3074,1331,,,5490.0,,,,10449.0,10167,10122,,,,,
-51431,7508,183,12097,4178,1331,,16669.0,,,,,2792.0,7396,3649,1849.0,5535.0,,,
-6859,7508,3575,5313,16218,1331,,16669.0,,,,,2048.0,8641,4813,1849.0,5535.0,,,
-122615,7508,9051,5313,16218,1331,,16669.0,,,,,2048.0,5459,8021,1849.0,5535.0,,,
-29681,7508,8812,8043,16726,1331,,16669.0,,,,,6360.0,11218,1352,1849.0,5535.0,,,
-81739,7508,8006,8043,16726,1331,,16669.0,,,,,6360.0,11570,2035,1849.0,5535.0,,,
-75396,7508,1264,12103,21026,1331,,16669.0,,,,,2319.0,294,6737,1849.0,5535.0,,,
-1615,7508,5987,5313,16218,1331,,16669.0,,,,,2048.0,971,192,1849.0,5535.0,,,
-81559,7508,4351,7788,1933,1331,,16669.0,,,,,9700.0,6346,2294,1849.0,5535.0,,,
-142462,7508,4351,12103,21026,1331,,16669.0,,,,,2319.0,6346,2294,1849.0,5535.0,,,
-54948,7508,4351,1317,8742,1331,,16669.0,,,,,4893.0,9550,4725,1849.0,5535.0,,,
-91359,7508,4351,5313,16218,1331,,16669.0,,,,,2048.0,6346,2294,1849.0,5535.0,,,
-102634,2253,5987,11649,9206,1331,,,5490.0,,,,12897.0,342,192,,,,,
-57404,2253,5987,12081,20227,1331,,,5490.0,,,,3837.0,4102,192,,,,,
-59301,2253,7127,6349,18224,1331,,,5490.0,,,,342.0,5537,6041,,,,,
-46314,2253,8644,7278,22139,1331,,,5490.0,,,,5122.0,2352,1040,,,,,
-99875,2253,8881,9178,10472,1331,,,5490.0,,,,2495.0,9490,836,,,,,
-101072,2253,6743,10798,14751,1331,,,5490.0,,,,9281.0,5140,4357,,,,,
-54214,2253,8644,12081,20227,1331,,,5490.0,,,,4620.0,2352,1040,,,,,
-29372,2253,5987,7278,22139,1331,,,5490.0,,,,7573.0,4102,192,,,,,
-16606,2253,5987,757,20626,1331,,,5490.0,,,,11146.0,4102,192,,,,,
-36083,2253,5987,5502,18949,1331,,,5490.0,,,,9128.0,342,192,,,,,
-90263,2253,1042,457,8629,1331,,,5490.0,,,,10905.0,10444,3856,,,,,
-141677,2253,5622,6349,18224,1331,,,5490.0,,,,342.0,559,1123,,,,,
-112438,2253,7973,1690,15078,1331,,,5490.0,,,,9394.0,11760,10316,,,,,
-40469,2253,1941,10102,2413,1331,,,5490.0,,,,8328.0,10167,10122,,,,,
-61710,2253,5987,9456,2238,1331,,,5490.0,,,,7941.0,4102,192,,,,,
-146689,2253,4555,9643,3647,1331,,,5490.0,,,,11276.0,5174,241,,,,,
-123770,2253,8644,6203,22276,1331,,,5490.0,,,,4005.0,2352,1040,,,,,
-145101,2253,1941,5436,3310,1331,,,5490.0,,,,2108.0,898,10122,,,,,
-16732,2253,1941,10245,6616,1331,,,5490.0,,,,11097.0,10167,10122,,,,,
-141264,2253,5987,6203,22276,1331,,,5490.0,,,,4005.0,11454,8747,,,,,
-33788,2253,2734,457,8629,1331,,,5490.0,,,,10905.0,6992,3891,,,,,
-98588,2253,2734,8880,6505,1331,,,5490.0,,,,12136.0,84,6098,,,,,
-55856,2253,4351,6349,18224,1331,,,5490.0,,,,342.0,1557,1323,,,,,
-90501,2253,4351,1690,15078,1331,,,5490.0,,,,9394.0,1557,1323,,,,,
-17854,2253,4351,6349,18224,1331,,,5490.0,,,,342.0,463,7738,,,,,
-93949,2253,4351,1690,15078,1331,,,5490.0,,,,9394.0,463,7738,,,,,
-83743,2253,4351,12597,22984,1331,,,5490.0,,,,8497.0,463,7738,,,,,
-122179,2253,4351,1464,7793,1331,,,5490.0,,,,4455.0,463,7738,,,,,
-72134,2253,4351,9643,3647,1331,,,5490.0,,,,11276.0,463,7738,,,,,
-82182,2253,4351,4589,9396,1331,,,5490.0,,,,11234.0,463,7738,,,,,
-98134,2253,4351,6355,22424,1331,,,5490.0,,,,5801.0,463,7738,,,,,
-105276,2253,4351,457,8629,1331,,,5490.0,,,,10905.0,1272,2294,,,,,
-80892,2253,4351,1983,18523,1331,,,5490.0,,,,13679.0,463,7738,,,,,
-138861,2253,4351,3131,13151,1331,,,5490.0,,,,9955.0,463,7738,,,,,
-125680,2253,4351,1464,7793,1331,,,5490.0,,,,4455.0,10344,10492,,,,,
-62465,2253,4351,9808,2635,1331,,,5490.0,,,,5887.0,463,7738,,,,,
-43261,2253,4351,7015,21437,1331,,,5490.0,,,,8370.0,7216,5174,,,,,
-65994,2253,4351,8428,6179,1331,,,5490.0,,,,1370.0,463,7738,,,,,
-44338,2253,4351,8579,8565,1331,,,5490.0,,,,5190.0,1557,1323,,,,,
-97523,2253,4351,6203,22276,1331,,,5490.0,,,,4005.0,10401,2940,,,,,
-88274,2253,4351,1716,23720,1331,,,5490.0,,,,1242.0,7216,5174,,,,,
-84947,2253,4351,10804,16846,1331,,,5490.0,,,,7350.0,463,7738,,,,,
-78897,2253,4351,2276,12297,1331,,,5490.0,,,,4111.0,463,7738,,,,,
-19460,2253,4351,8801,10295,1331,,,5490.0,,,,10979.0,463,7738,,,,,
-45054,2253,4351,12840,4293,1331,,,5490.0,,,,2696.0,7457,1323,,,,,
-3724,2253,4351,8579,8565,1331,,,5490.0,,,,5190.0,6544,2324,,,,,
-88025,2253,4351,9643,3647,1331,,,5490.0,,,,11276.0,4132,10267,,,,,
-20599,6920,5987,12639,11106,1331,,,5490.0,,,,3916.0,342,192,,,,,
-149124,4834,3962,5830,3432,1331,,,5490.0,,,,7824.0,1636,4542,,,,,
-63370,4834,3962,11863,11571,1331,,,5490.0,,,,9856.0,9269,2063,,,,,
-60927,4834,4880,8645,5004,1331,,,5490.0,,,,7989.0,8711,5316,,,,,
-52404,4834,3962,1463,13009,1331,,,5490.0,,,,12139.0,4275,2063,,,,,
-18949,6920,10181,1276,22405,1331,,,5490.0,,,,2705.0,2732,566,,,,,
-67625,4834,3962,963,4234,1331,,,5490.0,,,,13601.0,9269,2063,,,,,
-51784,4834,3962,1282,18682,1331,,,5490.0,,,,8989.0,4275,2063,,,,,
-128512,4834,1004,8645,5004,1331,,,5490.0,,,,7989.0,10961,10789,,,,,
-131302,6920,3087,5659,7261,1331,,,5490.0,,,,5010.0,6673,9982,,,,,
-24005,4834,1004,8645,5004,1331,,,5490.0,,,,7989.0,854,4938,,,,,
-52479,4834,7565,8645,5004,1331,,,5490.0,,,,7989.0,293,5494,,,,,
-5760,4834,3080,7073,23818,1331,,,5490.0,,,,1898.0,8631,7975,,,,,
-147602,4834,10181,1073,18068,1331,,,5490.0,,,,2651.0,7220,8765,,,,,
-10640,4834,10181,9811,22231,1331,,,5490.0,,,,589.0,9884,8765,,,,,
-93595,4834,3080,1073,18068,1331,,,5490.0,,,,2651.0,5484,1813,,,,,
-16943,6920,2734,12639,11106,1331,,,5490.0,,,,3916.0,1045,5056,,,,,
-127327,4834,3962,7853,12266,1331,,,5490.0,,,,11693.0,4275,2063,,,,,
-54714,4834,3962,5830,3432,1331,,,5490.0,,,,7824.0,8342,6187,,,,,
-60188,4834,225,798,18716,1331,,,5490.0,,,,10891.0,9529,6714,,,,,
-113924,4834,3962,11750,20208,1331,,,5490.0,,,,13110.0,1636,4542,,,,,
-24149,6920,7565,924,21423,1331,,,5490.0,,,,3325.0,293,5494,,,,,
-120692,4834,3962,6041,2660,1331,,,5490.0,,,,4909.0,4275,2063,,,,,
-123053,6920,5987,924,21423,1331,,,5490.0,,,,3325.0,342,192,,,,,
-78517,4834,3962,9735,13036,1331,,,5490.0,,,,2852.0,4275,2063,,,,,
-136540,4834,225,12816,2955,1331,,,5490.0,,,,13282.0,9529,6714,,,,,
-56770,4834,5248,8645,5004,1331,,,5490.0,,,,7989.0,7110,9680,,,,,
-115892,4834,2495,8645,5004,1331,,,5490.0,,,,7989.0,10328,5186,,,,,
-149330,9633,7230,8490,11530,1331,,16669.0,,,,,13068.0,11185,10988,1849.0,5535.0,,,
-120230,9633,7230,1947,19476,1331,,16669.0,,,,,6918.0,11185,10988,1849.0,5535.0,,,
-118473,9633,4351,3273,21867,1331,,16669.0,,,,,6912.0,5611,3167,1849.0,5535.0,,,
-41457,9633,7230,4110,20602,1331,,16669.0,,,,,9771.0,11185,10988,1849.0,5535.0,,,
-55276,9633,7230,1947,12291,1331,,16669.0,,,,,4856.0,11185,10988,1849.0,5535.0,,,
-42791,7659,4351,11141,10173,1331,,16669.0,,,,,10857.0,6346,2294,1849.0,5535.0,,,
-54119,7659,4351,5193,15553,1331,,16669.0,,,,,13571.0,8007,118,1849.0,5535.0,,,
-51285,7659,9675,4517,14075,1331,,16669.0,,,,,6033.0,1041,3214,1849.0,5535.0,,,
-55583,7659,4351,4517,14075,1331,,16669.0,,,,,6033.0,6346,2294,1849.0,5535.0,,,
-152488,10034,10574,12103,14043,1331,,,5490.0,,,,10617.0,7922,1613,,,,,
-98325,7659,3600,5193,15553,1331,,16669.0,,,,,13571.0,7600,1586,1849.0,5535.0,,,
-136758,2700,255,2965,23642,1331,,,5490.0,,,,966.0,827,10862,,,,,
-60119,7659,4351,3795,16087,1331,,16669.0,,,,,12477.0,6346,2294,1849.0,5535.0,,,
-11703,2700,8644,3887,183,1331,,,5490.0,,,,6827.0,12129,45,,,,,
-95659,2700,10181,2045,18201,1331,,,5490.0,,,,6978.0,2434,9641,,,,,
-86997,7659,5987,1264,10453,1331,,16669.0,,,,,7937.0,692,2597,1849.0,5535.0,,,
-134716,2700,4351,7671,19255,1331,,,5490.0,,,,10761.0,2498,9470,,,,,
-34136,2700,4351,6769,23457,1331,,,5490.0,,,,216.0,2498,9470,,,,,
-70662,2700,4351,7938,8240,1331,,,5490.0,,,,5022.0,3870,3127,,,,,
-123762,7659,255,3795,16087,1331,,16669.0,,,,,12477.0,10889,7672,1849.0,5535.0,,,
-58274,2700,2734,9260,19762,1331,,,5490.0,,,,1426.0,4526,9165,,,,,
-148981,2700,3575,3887,183,1331,,,5490.0,,,,6827.0,5361,4813,,,,,
-16736,2700,1042,3887,183,1331,,,5490.0,,,,6827.0,10444,3856,,,,,
-95110,2700,4351,4122,20791,1331,,,5490.0,,,,1735.0,5236,7272,,,,,
-43316,2700,4351,3887,183,1331,,,5490.0,,,,6827.0,7500,2632,,,,,
-94283,2700,9433,3887,183,1331,,,5490.0,,,,6827.0,3548,6892,,,,,
-72347,2700,4351,11289,23244,1331,,,5490.0,,,,7034.0,3870,3127,,,,,
-94264,2700,8644,11697,12815,1331,,,5490.0,,,,7172.0,7817,11228,,,,,
-70625,2700,4351,11289,23244,1331,,,5490.0,,,,7034.0,5236,7272,,,,,
-118474,2700,4351,11289,23244,1331,,,5490.0,,,,7034.0,7500,2632,,,,,
-92735,2700,2734,1463,13009,1331,,,5490.0,,,,8045.0,5513,11371,,,,,
-93664,2700,4351,1904,14127,1331,,,5490.0,,,,5893.0,1272,2294,,,,,
-126651,2700,4351,5478,3145,1331,,,5490.0,,,,11028.0,5236,7272,,,,,
-2601,2700,1296,3887,183,1331,,,5490.0,,,,6827.0,6175,266,,,,,
-121941,2700,4351,1904,14127,1331,,,5490.0,,,,5893.0,5236,7272,,,,,
-131680,2700,566,1904,14127,1331,,,5490.0,,,,5893.0,496,7067,,,,,
-64594,2700,4351,11697,12815,1331,,,5490.0,,,,7172.0,3870,3127,,,,,
-140307,2700,4351,2769,16084,1331,,,5490.0,,,,3265.0,1272,2294,,,,,
-119028,2700,2734,2965,23642,1331,,,5490.0,,,,4475.0,84,6098,,,,,
-32014,2700,4351,5487,9554,1331,,,5490.0,,,,6103.0,2498,9470,,,,,
-15371,2700,5987,1904,14127,1331,,,5490.0,,,,5893.0,271,2597,,,,,
-42228,2700,2734,10486,18241,1331,,,5490.0,,,,885.0,4526,9165,,,,,
-49765,2700,5987,12229,19094,1331,,,5490.0,,,,11759.0,271,2597,,,,,
-95955,2700,2734,4392,9298,1331,,,5490.0,,,,13378.0,5513,11371,,,,,
-86113,2700,10347,8406,628,1331,,,5490.0,,,,9199.0,9013,2475,,,,,
-146880,2700,4351,10496,17766,1331,,,5490.0,,,,264.0,2498,9470,,,,,
-31815,2700,4351,2769,16084,1331,,,5490.0,,,,3265.0,10663,9470,,,,,
-146126,2700,4351,10496,17766,1331,,,5490.0,,,,264.0,187,7272,,,,,
-117743,2700,3493,3887,183,1331,,,5490.0,,,,6827.0,5926,2051,,,,,
-50121,2700,10181,11697,12815,1331,,,5490.0,,,,7172.0,514,1799,,,,,
-2371,2700,4351,12229,19094,1331,,,5490.0,,,,11759.0,5236,7272,,,,,
-127277,2700,566,11289,23244,1331,,,5490.0,,,,7034.0,496,7067,,,,,
-33457,2700,4351,12499,4680,1331,,,5490.0,,,,10434.0,9042,2097,,,,,
-102325,2700,4351,7671,19255,1331,,,5490.0,,,,10761.0,9042,2097,,,,,
-72241,2700,4351,11289,23244,1331,,,5490.0,,,,7034.0,1272,2294,,,,,
-105384,2700,5987,10719,8159,1331,,,5490.0,,,,3212.0,342,192,,,,,
-15937,2700,4351,10207,21944,1331,,,5490.0,,,,9818.0,9042,2097,,,,,
-132531,2700,9675,2769,16084,1331,,,5490.0,,,,3265.0,420,2969,,,,,
-62830,2700,4351,2769,16084,1331,,,5490.0,,,,3265.0,11562,2097,,,,,
-20912,2700,7699,4122,20791,1331,,,5490.0,,,,1735.0,6864,3621,,,,,
-88682,2700,4351,12499,4680,1331,,,5490.0,,,,10434.0,2498,9470,,,,,
-77519,2700,4351,10207,21944,1331,,,5490.0,,,,9818.0,2498,9470,,,,,
-17620,2700,4351,11697,12815,1331,,,5490.0,,,,7172.0,7500,2632,,,,,
-35366,2700,4351,4122,20791,1331,,,5490.0,,,,1735.0,1272,2294,,,,,
-18793,2700,10574,11697,12815,1331,,,5490.0,,,,7172.0,5388,1613,,,,,
-9423,2700,4351,8447,11940,1331,,,5490.0,,,,7421.0,9042,2097,,,,,
-133036,2700,255,1018,20367,1331,,,5490.0,,,,8237.0,827,10862,,,,,
-44732,2700,2431,2045,18201,1331,,,5490.0,,,,6978.0,4152,6170,,,,,
-107150,2700,4351,7938,8240,1331,,,5490.0,,,,5022.0,5236,7272,,,,,
-117279,2700,5987,10496,17766,1331,,,5490.0,,,,264.0,4102,192,,,,,
-125689,2700,4351,7938,8240,1331,,,5490.0,,,,5022.0,10663,9470,,,,,
-76836,2700,8600,11289,23244,1331,,,5490.0,,,,7034.0,8747,7989,,,,,
-7573,2700,566,12229,19094,1331,,,5490.0,,,,11759.0,496,7067,,,,,
-94634,2700,9675,12229,19094,1331,,,5490.0,,,,11759.0,420,2969,,,,,
-32886,2700,4351,12229,19094,1331,,,5490.0,,,,11759.0,10663,9470,,,,,
-104258,2700,4351,9465,481,1331,,,5490.0,,,,7767.0,2498,9470,,,,,
-30325,2700,5987,7938,8240,1331,,,5490.0,,,,5022.0,271,2597,,,,,
-75482,2700,5987,12206,19851,1331,,,5490.0,,,,11913.0,342,192,,,,,
-75245,2700,4351,4122,20791,1331,,,5490.0,,,,1735.0,3870,3127,,,,,
-4022,2700,4351,12748,18235,1331,,,5490.0,,,,149.0,9042,2097,,,,,
-71386,2700,4351,1994,9287,1331,,,5490.0,,,,12875.0,5236,7272,,,,,
-139341,2700,4351,6769,23457,1331,,,5490.0,,,,216.0,9042,2097,,,,,
-132605,7659,4351,8374,14572,1331,,16669.0,,,,,12842.0,527,8825,1849.0,5535.0,,,
-96863,2700,4351,8447,11940,1331,,,5490.0,,,,7421.0,2498,9470,,,,,
-93150,2700,5987,11289,23244,1331,,,5490.0,,,,7034.0,271,2597,,,,,
-29482,2700,10347,12229,19094,1331,,,5490.0,,,,2544.0,2339,2475,,,,,
-116558,2700,4351,7938,8240,1331,,,5490.0,,,,5022.0,1272,2294,,,,,
-4684,2700,10181,3887,183,1331,,,5490.0,,,,6827.0,514,1799,,,,,
-73909,2700,4351,12748,18235,1331,,,5490.0,,,,149.0,2498,9470,,,,,
-97855,7659,4351,1264,10453,1331,,16669.0,,,,,7937.0,6346,2294,1849.0,5535.0,,,
-109491,7659,9675,3795,16087,1331,,16669.0,,,,,12477.0,1041,3214,1849.0,5535.0,,,
-146722,2700,566,3887,183,1331,,,5490.0,,,,6827.0,496,7067,,,,,
-91197,2700,4351,12229,19094,1331,,,5490.0,,,,11759.0,1272,2294,,,,,
-38628,2700,4351,4046,3456,1331,,,5490.0,,,,2833.0,5236,7272,,,,,
-79700,2700,4351,10207,21944,1331,,,5490.0,,,,9818.0,187,7272,,,,,
-143477,2700,1042,4122,20791,1331,,,5490.0,,,,1735.0,10444,3856,,,,,
-92361,2700,4351,12590,4092,1331,,,5490.0,,,,11086.0,10663,9470,,,,,
-14997,2700,4351,11697,12815,1331,,,5490.0,,,,7172.0,1272,2294,,,,,
-63342,2700,4351,1496,21359,1331,,,5490.0,,,,472.0,2498,9470,,,,,
-120528,2700,4351,3281,19634,1331,,,5490.0,,,,9852.0,2498,9470,,,,,
-79324,8152,6810,900,10177,1331,,,5490.0,,,,6652.0,5996,11201,,,,,
-136801,2307,2734,12004,5958,1331,,,5490.0,,,,282.0,11653,5056,,,,,
-73243,116,4351,11283,14033,1331,,,5490.0,,,,1557.0,8883,1419,,,,,
-138438,2307,2734,1923,18654,1331,,,5490.0,,,,7800.0,10721,10970,,,,,
-66709,2307,2734,5753,4626,1331,,,5490.0,,,,11576.0,11653,5056,,,,,
-62643,8152,4351,4122,20791,1331,,,5490.0,,,,8213.0,3354,3113,,,,,
-146223,8152,4351,6549,14351,1331,,,5490.0,,,,8146.0,3354,3113,,,,,
-79298,2307,2734,1923,18654,1331,,,5490.0,,,,7800.0,1045,5056,,,,,
-79119,8152,10002,1986,23382,1331,,,5490.0,,,,12798.0,9301,4502,,,,,
-60837,8152,10181,924,21423,1331,,,5490.0,,,,1106.0,5222,4027,,,,,
-91118,116,4351,9499,19378,1331,,,5490.0,,,,,7583,7509,,,,,
-82859,8152,3080,5506,13264,1331,,,5490.0,,,,11994.0,769,9646,,,,,
-17002,2307,2734,2555,12270,1331,,,5490.0,,,,8508.0,1045,5056,,,,,
-139557,8152,3228,5006,4393,1331,,,5490.0,,,,667.0,6818,10843,,,,,
-82902,8152,255,10201,21330,1331,,,5490.0,,,,,10678,7672,,,,,
-72158,8152,10181,1923,18654,1331,,,5490.0,,,,4652.0,5222,4027,,,,,
-39205,8152,10181,6549,14351,1331,,,5490.0,,,,8146.0,5222,4027,,,,,
-108802,8152,4351,7408,12272,1331,,,5490.0,,,,10591.0,3354,3113,,,,,
-44813,8152,7973,9063,10243,1331,,,5490.0,,,,9785.0,11760,10316,,,,,
-146404,8152,10181,7502,17073,1331,,,5490.0,,,,2532.0,10609,4027,,,,,
-138344,8152,10181,8523,6872,1331,,,5490.0,,,,7972.0,5222,4027,,,,,
-93483,8152,2123,715,1969,1331,,,5490.0,,,,5980.0,4701,11099,,,,,
-60977,2307,2734,12004,5958,1331,,,5490.0,,,,282.0,8450,11159,,,,,
-128515,8152,10181,8987,8977,1331,,,5490.0,,,,13574.0,5222,4027,,,,,
-9145,8152,5091,1276,22405,1331,,,5490.0,,,,10326.0,1476,1616,,,,,
-46575,8152,1296,10201,21330,1331,,,5490.0,,,,,8029,266,,,,,
-146423,8152,4351,5506,13264,1331,,,5490.0,,,,11994.0,3354,3113,,,,,
-64620,8152,10181,7408,12272,1331,,,5490.0,,,,10591.0,5222,4027,,,,,
-51828,550,10802,5455,5898,1331,,,5490.0,,,,13379.0,2737,1719,,,,,
-56304,10728,566,1893,1726,1331,,,5490.0,,,,4674.0,496,7067,,,,,
-108541,10728,4351,2393,7359,1331,,,5490.0,,,,2765.0,5566,6758,,,,,
-49558,2750,2734,4217,20407,1331,,,5490.0,,,,2827.0,7698,1035,,,,,
-85216,10133,4233,12824,4546,1331,,,5490.0,,,,6321.0,3016,6002,,,,,
-91572,5111,4493,3763,10600,1331,,,5490.0,,,,4480.0,6075,3507,,,,,
-73956,10728,1435,6300,5515,1331,,,5490.0,,,,916.0,7993,7839,,,,,
-48984,10275,3637,10121,19454,1331,,,5490.0,,,,8210.0,11463,7184,,,,,
-142573,10728,566,219,23469,1331,,,5490.0,,,,11746.0,496,7067,,,,,
-64314,10728,955,9090,13934,1331,,,5490.0,,,,1623.0,9552,3593,,,,,
-29365,550,1004,11019,22714,1331,,,5490.0,,,,1069.0,10961,10789,,,,,
-49651,10728,5987,12068,15316,1331,,,5490.0,,,,11605.0,342,192,,,,,
-29895,10728,1004,219,23469,1331,,,5490.0,,,,11746.0,10961,10789,,,,,
-129088,10133,7565,3166,23012,1331,,,5490.0,,,,3341.0,776,3975,,,,,
-133995,10728,1941,11365,1678,1331,,,5490.0,,,,12444.0,10167,10122,,,,,
-72054,10728,7583,566,5478,1331,,,5490.0,,,,3541.0,4980,3722,,,,,
-118335,5111,5987,2894,21368,1331,,,5490.0,,,,13287.0,4102,192,,,,,
-11262,2750,2734,7502,17073,1331,,,5490.0,,,,10124.0,2539,1131,,,,,
-36486,550,10802,4217,20407,1331,,,5490.0,,,,3988.0,2737,1719,,,,,
-47382,10728,4351,9090,13934,1331,,,5490.0,,,,6252.0,8212,637,,,,,
-84012,905,4351,8139,3884,1331,,,5490.0,,,,10668.0,7216,5174,,,,,
-87336,550,6923,4149,20375,1331,,,5490.0,,,,3288.0,12145,5318,,,,,
-17728,2750,6810,6337,1859,1331,,,5490.0,,,,2816.0,12107,8083,,,,,
-40296,10133,3962,4401,13613,1331,,,5490.0,,,,452.0,9648,8086,,,,,
-73394,10728,8841,3781,594,1331,,,5490.0,,,,7968.0,993,4939,,,,,
-118410,10728,1004,1893,1726,1331,,,5490.0,,,,4674.0,6220,7308,,,,,
-16806,905,4448,9667,19409,1331,,,5490.0,,,,11331.0,8695,7688,,,,,
-66479,10133,3962,6053,20022,1331,,,5490.0,,,,,12005,4247,,,,,
-90328,10728,4448,2393,7359,1331,,,5490.0,,,,2765.0,8695,7688,,,,,
-46784,10133,3080,7412,12094,1331,,,5490.0,,,,183.0,4263,2367,,,,,
-82216,10728,562,219,23469,1331,,,5490.0,,,,11746.0,1203,6643,,,,,
-412,10728,566,12068,15316,1331,,,5490.0,,,,11605.0,496,7067,,,,,
-5045,10728,3087,3398,8402,1331,,,5490.0,,,,3757.0,9062,3458,,,,,
-22956,550,2734,9500,16635,1331,,,5490.0,,,,2304.0,5574,169,,,,,
-142801,2750,6810,12753,13301,1331,,,5490.0,,,,1862.0,10079,4473,,,,,
-151340,10728,7665,6300,5515,1331,,,5490.0,,,,916.0,11748,4356,,,,,
-67296,10728,7583,696,9377,1331,,,5490.0,,,,6107.0,6030,5989,,,,,
-41923,10728,8600,6300,5515,1331,,,5490.0,,,,916.0,8747,7989,,,,,
-117410,10728,8127,11242,9728,1331,,,5490.0,,,,11470.0,9838,570,,,,,
-139897,10728,4448,924,21423,1331,,,5490.0,,,,10542.0,12228,8718,,,,,
-6300,2750,2734,8041,6798,1331,,,5490.0,,,,711.0,7698,1035,,,,,
-71404,10728,4351,3781,594,1331,,,5490.0,,,,7968.0,4928,8818,,,,,
-77825,905,1004,5863,1194,1331,,,5490.0,,,,6118.0,6220,7308,,,,,
-94778,10728,4448,10522,4738,1331,,,5490.0,,,,9127.0,8695,7688,,,,,
-91779,10728,566,9293,1136,1331,,,5490.0,,,,13617.0,496,7067,,,,,
-40277,550,6648,11019,22714,1331,,,5490.0,,,,1069.0,5281,1610,,,,,
-22792,10728,1941,1876,19568,1331,,,5490.0,,,,3734.0,10167,10122,,,,,
-136217,10728,7565,2393,7359,1331,,,5490.0,,,,2765.0,293,5494,,,,,
-125663,10728,7230,11242,9728,1331,,,5490.0,,,,11470.0,11277,10988,,,,,
-66229,10133,1004,3166,23012,1331,,,5490.0,,,,3341.0,10961,10789,,,,,
-9095,905,6805,7302,15180,1331,,,5490.0,,,,8712.0,9060,7178,,,,,
-73836,10728,11044,3731,16960,1331,,,5490.0,,,,9412.0,3423,10109,,,,,
-116018,10728,7230,10095,3395,1331,,,5490.0,,,,3630.0,11277,10988,,,,,
-121817,10728,4448,1893,1726,1331,,,5490.0,,,,4674.0,8695,7688,,,,,
-63363,550,2734,10891,5037,1331,,,5490.0,,,,13689.0,84,6098,,,,,
-136254,10728,5205,3731,16960,1331,,,5490.0,,,,9412.0,6478,844,,,,,
-148707,10728,1435,11806,11363,1331,,,5490.0,,,,3869.0,7993,7839,,,,,
-127694,10728,5987,1893,1726,1331,,,5490.0,,,,4674.0,11454,8747,,,,,
-78192,10728,566,12498,1026,1331,,,5490.0,,,,6554.0,496,7067,,,,,
-52646,550,7709,4631,16086,1331,,,5490.0,,,,2249.0,3948,2570,,,,,
-129120,550,1004,4149,20375,1331,,,5490.0,,,,3288.0,10961,10789,,,,,
-118757,10728,2123,4717,2168,1331,,,5490.0,,,,1820.0,8751,11099,,,,,
-33593,10728,1004,424,1544,1331,,,5490.0,,,,8084.0,6220,7308,,,,,
-135746,10728,4351,2393,7359,1331,,,5490.0,,,,2765.0,12161,4725,,,,,
-8324,10728,4351,4419,19766,1331,,,5490.0,,,,10053.0,8212,637,,,,,
-44090,550,4448,3893,7064,1331,,,5490.0,,,,4949.0,12228,8718,,,,,
-1094,10728,1004,2393,7359,1331,,,5490.0,,,,2765.0,6220,7308,,,,,
-79187,10699,3080,9293,1136,1331,,,5490.0,,,,7844.0,8879,11361,,,,,
-99692,10133,225,7412,12094,1331,,,5490.0,,,,183.0,9529,6714,,,,,
-7697,10133,2870,3166,23012,1331,,,5490.0,,,,3341.0,4289,4768,,,,,
-5405,10728,4351,11242,9728,1331,,,5490.0,,,,11470.0,1759,10739,,,,,
-152451,10728,2142,9293,1136,1331,,,5490.0,,,,13617.0,9372,9736,,,,,
-135625,10728,7565,424,1544,1331,,,5490.0,,,,8084.0,293,5494,,,,,
-19823,550,5193,2070,12178,1331,,,5490.0,,,,12437.0,10497,7014,,,,,
-62394,5111,1296,7887,21946,1331,,,5490.0,,,,4969.0,8029,266,,,,,
-81412,10728,6597,6300,5515,1331,,,5490.0,,,,916.0,3346,10111,,,,,
-101501,550,1004,3893,7064,1331,,,5490.0,,,,4949.0,6220,7308,,,,,
-128554,905,4448,5863,1194,1331,,,5490.0,,,,6118.0,8695,7688,,,,,
-27047,550,2734,9990,10656,1331,,,5490.0,,,,12360.0,10721,10970,,,,,
-112501,550,10802,8847,13219,1331,,,5490.0,,,,1657.0,8133,1719,,,,,
-100620,550,5987,9990,10656,1331,,,5490.0,,,,12360.0,8239,8155,,,,,
-137771,905,4351,11886,22117,1331,,,5490.0,,,,1362.0,5029,1419,,,,,
-99485,905,7565,5863,1194,1331,,,5490.0,,,,6118.0,293,5494,,,,,
-86584,10728,7565,3072,22322,1331,,,5490.0,,,,11466.0,293,5494,,,,,
-95473,10728,566,11365,1678,1331,,,5490.0,,,,12444.0,496,7067,,,,,
-19487,550,1941,7932,18329,1331,,,5490.0,,,,11598.0,10167,10122,,,,,
-35822,550,4351,12054,18743,1331,,,5490.0,,,,3604.0,463,7738,,,,,
-8875,10728,1004,10522,4738,1331,,,5490.0,,,,9127.0,6220,7308,,,,,
-45253,10728,8841,10095,3395,1331,,,5490.0,,,,3630.0,993,4939,,,,,
-105563,550,6847,4769,1790,1331,,,5490.0,,,,2074.0,5605,8226,,,,,
-44031,550,7565,4149,20375,1331,,,5490.0,,,,3288.0,293,5494,,,,,
-137146,10728,9284,3072,22322,1331,,,5490.0,,,,11466.0,2691,5807,,,,,
-64163,5111,3160,7887,21946,1331,,,5490.0,,,,4969.0,468,9518,,,,,
-22025,905,4351,5863,1194,1331,,,5490.0,,,,6118.0,8607,3368,,,,,
-10329,550,7565,11019,22714,1331,,,5490.0,,,,1069.0,6921,6205,,,,,
-63434,2750,6810,4667,15577,1331,,,5490.0,,,,10048.0,12107,8083,,,,,
-114290,10728,4351,2393,7359,1331,,,5490.0,,,,2765.0,8607,3368,,,,,
-129485,10728,4351,2393,7359,1331,,,5490.0,,,,2765.0,2443,7969,,,,,
-22412,10728,4351,6300,5515,1331,,,5490.0,,,,916.0,4928,8818,,,,,
-53813,10728,3575,12068,15316,1331,,,5490.0,,,,11605.0,5361,4813,,,,,
-133268,10699,2734,11063,13550,1331,,,5490.0,,,,537.0,6391,7789,,,,,
-70729,10728,4448,424,1544,1331,,,5490.0,,,,8084.0,8695,7688,,,,,
-37223,10728,566,6300,5515,1331,,,5490.0,,,,916.0,496,7067,,,,,
-16349,10728,7565,10522,4738,1331,,,5490.0,,,,9127.0,293,5494,,,,,
-8537,10699,4493,2622,15821,1331,,,5490.0,,,,10456.0,6634,2711,,,,,
-109704,10728,4351,8923,17405,1331,,,5490.0,,,,13047.0,5695,3655,,,,,
-55354,10728,1941,11242,9728,1331,,,5490.0,,,,11470.0,10167,10122,,,,,
-67069,10728,7565,1893,1726,1331,,,5490.0,,,,4674.0,293,5494,,,,,
-100888,2750,4277,4217,20407,1331,,,5490.0,,,,2827.0,1092,10993,,,,,
-100249,10728,5987,6300,5515,1331,,,5490.0,,,,916.0,11454,8747,,,,,
-2822,10728,1004,3072,22322,1331,,,5490.0,,,,11466.0,6220,7308,,,,,
-56020,905,7565,9667,19409,1331,,,5490.0,,,,11331.0,293,5494,,,,,
-16263,10728,7565,219,23469,1331,,,5490.0,,,,11746.0,293,5494,,,,,
-12783,10699,4351,11563,8635,1331,,,5490.0,,,,1997.0,7144,8490,,,,,
-99876,550,7565,3893,7064,1331,,,5490.0,,,,4949.0,293,5494,,,,,
-48203,905,1004,9667,19409,1331,,,5490.0,,,,11331.0,6220,7308,,,,,
-49641,10133,3962,10655,9580,1331,,,5490.0,,,,6815.0,12005,4247,,,,,
-129655,10728,6910,6300,5515,1331,,,5490.0,,,,916.0,11748,4356,,,,,
-128043,2750,4277,8041,6798,1331,,,5490.0,,,,711.0,1092,10993,,,,,
-64265,10728,2142,6300,5515,1331,,,5490.0,,,,916.0,9372,9736,,,,,
diff --git a/exports/observation.sql b/exports/observation.sql
deleted file mode 100644
index 012b32b..0000000
--- a/exports/observation.sql
+++ /dev/null
@@ -1,24 +0,0 @@
-SELECT
-(SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'observation_id' AND CAST(values AS STRING)=CAST(observation.observation_id AS STRING ) ) as observation_id,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'person_id' AND CAST(values AS STRING)=CAST(observation.person_id AS STRING ) ) as person_id,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'observation_concept_id' AND CAST(values AS STRING)=CAST(observation.observation_concept_id AS STRING ) ) as observation_concept_id,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'observation_date' AND CAST(values AS STRING)=CAST(observation.observation_date AS STRING ) ) as observation_date,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'observation_datetime' AND CAST(values AS STRING)=CAST(observation.observation_datetime AS STRING ) ) as observation_datetime,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'observation_type_concept_id' AND CAST(values AS STRING)=CAST(observation.observation_type_concept_id AS STRING ) ) as observation_type_concept_id,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'value_as_number' AND CAST(values AS STRING)=CAST(observation.value_as_number AS STRING ) ) as value_as_number,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'value_as_string' AND CAST(values AS STRING)=CAST(observation.value_as_string AS STRING ) ) as value_as_string,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'value_as_concept_id' AND CAST(values AS STRING)=CAST(observation.value_as_concept_id AS STRING ) ) as value_as_concept_id,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'qualifier_concept_id' AND CAST(values AS STRING)=CAST(observation.qualifier_concept_id AS STRING ) ) as qualifier_concept_id,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'unit_concept_id' AND CAST(values AS STRING)=CAST(observation.unit_concept_id AS STRING ) ) as unit_concept_id,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'provider_id' AND CAST(values AS STRING)=CAST(observation.provider_id AS STRING ) ) as provider_id,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'visit_occurrence_id' AND CAST(values AS STRING)=CAST(observation.visit_occurrence_id AS STRING ) ) as visit_occurrence_id,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'observation_source_value' AND CAST(values AS STRING)=CAST(observation.observation_source_value AS STRING ) ) as observation_source_value,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'observation_source_concept_id' AND CAST(values AS STRING)=CAST(observation.observation_source_concept_id AS STRING ) ) as observation_source_concept_id,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'unit_source_value' AND CAST(values AS STRING)=CAST(observation.unit_source_value AS STRING ) ) as unit_source_value,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'qualifier_source_value' AND CAST(values AS STRING)=CAST(observation.qualifier_source_value AS STRING ) ) as qualifier_source_value,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'value_source_concept_id' AND CAST(values AS STRING)=CAST(observation.value_source_concept_id AS STRING ) ) as value_source_concept_id,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'value_source_value' AND CAST(values AS STRING)=CAST(observation.value_source_value AS STRING ) ) as value_source_value,
- (SELECT encoded FROM wgan_original_pseudo.map WHERE table='observation' AND field = 'questionnaire_response_id' AND CAST(values AS STRING)=CAST(observation.questionnaire_response_id AS STRING ) ) as questionnaire_response_id
-FROM wgan_original.observation
-WHERE
-REGEXP_CONTAINS(UPPER(observation_source_value),'ICD')
\ No newline at end of file
diff --git a/exports/sample.csv b/exports/sample.csv
deleted file mode 100644
index 836bc9d..0000000
--- a/exports/sample.csv
+++ /dev/null
@@ -1,10 +0,0 @@
-id,first_name,last_name,age,gender
-100,steve,nyemba,40,m
-101,elon,nyemba,5,m
-200,steve,mqueen,80,m
-201,james,dean,80,m
-300,james,bond,50,m
-400,elon,musk,40,m
-401,kevin,james,50,m
-303,kevin,johnson,40,m
-103,Bari,nyemba,5,f
diff --git a/test.py b/test.py
deleted file mode 100644
index 717fc93..0000000
--- a/test.py
+++ /dev/null
@@ -1,287 +0,0 @@
-import tensorflow as tf
-from tensorflow.contrib.layers import l2_regularizer
-import numpy as np
-import time
-import os
-# os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
-# os.environ['CUDA_VISIBLE_DEVICES'] = "4,5"
-# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
-
-
-FLAGS = tf.app.flags.FLAGS
-
-tf.app.flags.DEFINE_string('train_dir', 'google_cloud_test/',
-                           """Directory where to store checkpoint. """)
-tf.app.flags.DEFINE_string('save_dir', 'google_cloud_test/',
-                           """Directory where to save generated data. """)
-tf.app.flags.DEFINE_integer('max_steps', 100,
-                            """Number of batches to run in each epoch.""")
-tf.app.flags.DEFINE_integer('max_epochs', 100,
-                            """Number of epochs to run.""")
-tf.app.flags.DEFINE_integer('batchsize', 10,
-                            """Batchsize.""")
-tf.app.flags.DEFINE_integer('z_dim', 10,
-                            """Dimensionality of random input.""")
-tf.app.flags.DEFINE_integer('data_dim', 30,
-                            """Dimensionality of data.""")
-tf.app.flags.DEFINE_integer('demo_dim', 8,
-                            """Dimensionality of demographics.""")
-tf.app.flags.DEFINE_float('reg', 0.0001,
-                          """L2 regularization.""")
-
-g_structure = [FLAGS.z_dim, FLAGS.z_dim]
-d_structure = [FLAGS.data_dim, int(FLAGS.data_dim/2), FLAGS.z_dim]
-
-
-def _variable_on_cpu(name, shape, initializer=None):
-    with tf.device('/cpu:0'):
-        var = tf.get_variable(name, shape, initializer=initializer)
-    return var
-
-
-def batchnorm(inputs, name, labels=None, n_labels=None):
-    mean, var = tf.nn.moments(inputs, [0], keep_dims=True)
-    shape = mean.shape[1].value
-    offset_m = _variable_on_cpu(shape=[n_labels,shape], name='offset'+name,
-                                initializer=tf.zeros_initializer)
-    scale_m = _variable_on_cpu(shape=[n_labels,shape], name='scale'+name,
-                               initializer=tf.ones_initializer)
-    offset = tf.nn.embedding_lookup(offset_m, labels)
-    scale = tf.nn.embedding_lookup(scale_m, labels)
-    result = tf.nn.batch_normalization(inputs, mean, var, offset, scale, 1e-8)
-    return result
-
-
-def layernorm(inputs, name, labels=None, n_labels=None):
-    mean, var = tf.nn.moments(inputs, [1], keep_dims=True)
-    shape = inputs.shape[1].value
-    offset_m = _variable_on_cpu(shape=[n_labels,shape], name='offset'+name,
-                                initializer=tf.zeros_initializer)
-    scale_m = _variable_on_cpu(shape=[n_labels,shape], name='scale'+name,
-                               initializer=tf.ones_initializer)
-    offset = tf.nn.embedding_lookup(offset_m, labels)
-    scale = tf.nn.embedding_lookup(scale_m, labels)
-    result = tf.nn.batch_normalization(inputs, mean, var, offset, scale, 1e-8)
-    return result
-
-
-def input_fn():
-    features_placeholder = tf.placeholder(shape=[None, FLAGS.data_dim], dtype=tf.float32)
-    labels_placeholder = tf.placeholder(shape=[None, 6], dtype=tf.float32)
-    dataset = tf.data.Dataset.from_tensor_slices((features_placeholder, labels_placeholder))
-    dataset = dataset.repeat(10000)
-    dataset = dataset.batch(batch_size=FLAGS.batchsize)
-    dataset = dataset.prefetch(1)
-    iterator = dataset.make_initializable_iterator()
-    return iterator, features_placeholder, labels_placeholder
-
-
-def generator(z, label):
-    x = z
-    tmp_dim = FLAGS.z_dim
-    with tf.variable_scope('G', reuse=tf.AUTO_REUSE, regularizer=l2_regularizer(FLAGS.reg)):
-        for i, dim in enumerate(g_structure[:-1]):
-            kernel = _variable_on_cpu('W_' + str(i), shape=[tmp_dim, dim])
-            h1 = batchnorm(tf.matmul(x, kernel), name='cbn' + str(i), labels=label, n_labels=FLAGS.demo_dim)
-            h2 = tf.nn.relu(h1)
-            x = x + h2
-            tmp_dim = dim
-        i = len(g_structure) - 1
-        kernel = _variable_on_cpu('W_' + str(i), shape=[tmp_dim, g_structure[-1]])
-        h1 = batchnorm(tf.matmul(x, kernel), name='cbn' + str(i),
-                       labels=label, n_labels=FLAGS.demo_dim)
-        h2 = tf.nn.tanh(h1)
-        x = x + h2
-
-        kernel = _variable_on_cpu('W_' + str(i+1), shape=[FLAGS.z_dim, FLAGS.data_dim])
-        bias = _variable_on_cpu('b_' + str(i+1), shape=[FLAGS.data_dim])
-        x = tf.nn.sigmoid(tf.add(tf.matmul(x, kernel), bias))
-    return x
-
-
-def discriminator(x, label):
-    with tf.variable_scope('D', reuse=tf.AUTO_REUSE, regularizer=l2_regularizer(FLAGS.reg)):
-        for i, dim in enumerate(d_structure[1:]):
-            kernel = _variable_on_cpu('W_' + str(i),
-                                      shape=[d_structure[i], dim])
-            bias = _variable_on_cpu('b_' + str(i), shape=[dim])
-            x = tf.nn.relu(tf.add(tf.matmul(x, kernel), bias))
-            x = layernorm(x, name='cln' + str(i), labels=label, n_labels=FLAGS.demo_dim)
-        i = len(d_structure)
-        kernel = _variable_on_cpu('W_' + str(i), shape=[d_structure[-1], 1])
-        bias = _variable_on_cpu('b_' + str(i), shape=[1])
-        y = tf.add(tf.matmul(x, kernel), bias)
-    return y
-
-
-def compute_dloss(real, fake, label):
-    epsilon = tf.random_uniform(
-        shape=[FLAGS.batchsize, 1],
-        minval=0.,
-        maxval=1.)
-    x_hat = real + epsilon * (fake - real)
-    y_hat_fake = discriminator(fake, label)
-    y_hat_real = discriminator(real, label)
-    y_hat = discriminator(x_hat, label)
-
-    grad = tf.gradients(y_hat, [x_hat])[0]
-    slopes = tf.sqrt(tf.reduce_sum(tf.square(grad), 1))
-    gradient_penalty = tf.reduce_mean((slopes - 1.) ** 2)
-    all_regs = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
-    w_distance = -tf.reduce_mean(y_hat_real) + tf.reduce_mean(y_hat_fake)+sum(all_regs)
-    loss = w_distance + 10 * gradient_penalty
-    tf.add_to_collection('dlosses', loss)
-
-    return w_distance, loss
-
-
-def compute_gloss(fake, label):
-    y_hat_fake = discriminator(fake, label)
-    all_regs = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
-    loss = -tf.reduce_mean(y_hat_fake)+sum(all_regs)
-    tf.add_to_collection('glosses', loss)
-    return loss, loss
-
-
-def tower_loss(scope, stage, real, label):
-    label = tf.cast(label, tf.int32)
-    print ([stage,label.shape])
-    label = label[:, 1] * 4 + tf.squeeze(
-        tf.matmul(label[:, 2:], tf.constant([[0], [1], [2], [3]], dtype=tf.int32)))
-    z = tf.random_normal(shape=[FLAGS.batchsize, FLAGS.z_dim])
-    fake = generator(z, label)
-    if stage == 'D':
-        w, loss = compute_dloss(real, fake, label)
-        losses = tf.get_collection('dlosses', scope)
-    else:
-        w, loss = compute_gloss(fake, label)
-        losses = tf.get_collection('glosses', scope)
-
-    total_loss = tf.add_n(losses, name='total_loss')
-    return total_loss, w
-
-
-def average_gradients(tower_grads):
-    average_grads = []
-    for grad_and_vars in zip(*tower_grads):
-        grads = []
-        for g, _ in grad_and_vars:
-            expanded_g = tf.expand_dims(g, 0)
-            grads.append(expanded_g)
-
-        grad = tf.concat(axis=0, values=grads)
-        grad = tf.reduce_mean(grad, 0)
-
-        v = grad_and_vars[0][1]
-        grad_and_var = (grad, v)
-        average_grads.append(grad_and_var)
-    return average_grads
-
-
-def graph(stage, opt):
-    tower_grads = []
-    per_gpu_w = []
-    iterator, features_placeholder, labels_placeholder = input_fn()
-    with tf.variable_scope(tf.get_variable_scope()):
-        for i in range(1):
-            with tf.device('/cpu:0'):
-                with tf.name_scope('%s_%d' % ('TOWER', i)) as scope:
-                    (real, label) = iterator.get_next()
-
-                    loss, w = tower_loss(scope, stage, real, label)
-                    tf.get_variable_scope().reuse_variables()
-                    vars_ = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=stage)
-                    grads = opt.compute_gradients(loss, vars_)
-                    tower_grads.append(grads)
-                    per_gpu_w.append(w)
-
-    grads = average_gradients(tower_grads)
-    apply_gradient_op = opt.apply_gradients(grads)
-
-    mean_w = tf.reduce_mean(per_gpu_w)
-    train_op = apply_gradient_op
-    return train_op, mean_w, iterator, features_placeholder, labels_placeholder
-
-
-def train(data, demo):
-    with tf.device('/cpu:0'):
-        opt_d = tf.train.AdamOptimizer(1e-4)
-        opt_g = tf.train.AdamOptimizer(1e-4)
-        train_d, w_distance, iterator_d, features_placeholder_d, labels_placeholder_d = graph('D', opt_d)
-        train_g, _, iterator_g, features_placeholder_g, labels_placeholder_g = graph('G', opt_g)
-        saver = tf.train.Saver()
-        init = tf.global_variables_initializer()
-
-        with tf.Session(config=tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)) as sess:
-            sess.run(init)
-            sess.run(iterator_d.initializer,
-                     feed_dict={features_placeholder_d: data,
-                                labels_placeholder_d: demo})
-            sess.run(iterator_g.initializer,
-                     feed_dict={features_placeholder_g: data,
-                                labels_placeholder_g: demo})
-
-            for epoch in range(1, FLAGS.max_epochs + 1):
-                start_time = time.time()
-                w_sum = 0
-                for i in range(FLAGS.max_steps):
-                    for _ in range(2):
-                        _, w = sess.run([train_d, w_distance])
-                        w_sum += w
-                    sess.run(train_g)
-                duration = time.time() - start_time
-
-                assert not np.isnan(w_sum), 'Model diverged with loss = NaN'
-
-                format_str = 'epoch: %d, w_distance = %f (%.1f)'
-                print(format_str % (epoch, -w_sum/(FLAGS.max_steps*2), duration))
-                if epoch % FLAGS.max_epochs == 0:
-                    # checkpoint_path = os.path.join(train_dir, 'multi')
-                    saver.save(sess, FLAGS.train_dir + 'emr_wgan', write_meta_graph=False, global_step=epoch)
-                    # saver.save(sess, train_dir, global_step=epoch)
-
-
-def generate(demo):
-    z = tf.random_normal(shape=[FLAGS.batchsize, FLAGS.z_dim])
-    y = tf.placeholder(shape=[FLAGS.batchsize, 6], dtype=tf.int32)
-    label = y[:, 1] * 4 + tf.squeeze(tf.matmul(y[:, 2:], tf.constant([[0], [1], [2], [3]], dtype=tf.int32)))
-    fake = generator(z, label)
-    saver = tf.train.Saver()
-    with tf.Session() as sess:
-        saver.restore(sess, FLAGS.train_dir + 'emr_wgan-' + str(FLAGS.max_epochs))
-        for m in range(2):
-            for n in range(2, 6):
-                idx1 = (demo[:, m] == 1)
-                idx2 = (demo[:, n] == 1)
-                idx = [idx1[j] and idx2[j] for j in range(len(idx1))]
-                num = np.sum(idx)
-                nbatch = int(np.ceil(num / FLAGS.batchsize))
-                label_input = np.zeros((nbatch*FLAGS.batchsize, 6))
-                label_input[:, n] = 1
-                label_input[:, m] = 1
-                output = []
-                for i in range(nbatch):
-                    f = sess.run(fake,feed_dict={y: label_input[i*FLAGS.batchsize:(i+1)*FLAGS.batchsize]})
-                    output.extend(np.round(f))
-                output = np.array(output)[:num]
-                np.save(FLAGS.save_dir + 'synthetic_' + str(m) + str(n), output)
-
-
-def load_data():
-    data = np.zeros(3000)
-    idx = np.random.choice(np.arange(3000),size=900)
-    data[idx] = 1
-    data = np.reshape(data, (100,30))
-    idx = np.random.randint(2,6,size=100)
-    idx2 = np.random.randint(2,size=100)
-    demo = np.zeros((100,6))
-    demo[np.arange(100), idx] = 1
-    demo[np.arange(100), idx2] = 1
-    return data, demo
-
-
-if __name__ == '__main__':
-    data, demo = load_data()
-    print ([data.shape,demo.shape])
-    train(data, demo)
-    # generate(demo)
-
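
Editor's note on the deleted test.py: both networks are conditioned on one of 8 joint demographic classes (FLAGS.demo_dim). tower_loss() and generate() derive that class index from the 6-column demo matrix produced by load_data(): column 1 is a binary attribute and columns 2-5 one-hot encode a 4-way attribute, so the index is demo[:, 1] * 4 + (position of the hot column among columns 2-5). A minimal NumPy sketch of the same encoding; the helper name encode_labels is ours, not the script's:

import numpy as np

def encode_labels(demo):
    # demo: (N, 6) one-hot demographics, laid out as in load_data().
    binary = demo[:, 1].astype(int)             # 0 or 1
    four_way = np.argmax(demo[:, 2:6], axis=1)  # 0..3; same as the matmul with [0, 1, 2, 3]
    return binary * 4 + four_way                # joint class in 0..7

demo = np.zeros((2, 6), dtype=int)
demo[0, 1] = demo[0, 2] = 1   # binary = 1, group 0 -> class 4
demo[1, 0] = demo[1, 5] = 1   # binary = 0, group 3 -> class 3
print(encode_labels(demo))    # [4 3]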
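The discriminator objective in compute_dloss() is the standard WGAN-GP loss: the Wasserstein estimate E[D(fake)] - E[D(real)] plus 10 times a gradient penalty that drives the gradient norm of D toward 1 along random real/fake interpolations. For reference, a sketch of that penalty term in TF 2.x eager style (the deleted script itself is TF 1.x graph code; discriminator here stands for any callable D(x, labels)):

import tensorflow as tf

def gradient_penalty(discriminator, real, fake, labels):
    # Interpolate each real/fake pair at a random point, as compute_dloss() does.
    eps = tf.random.uniform([tf.shape(real)[0], 1], 0.0, 1.0)
    x_hat = real + eps * (fake - real)
    with tf.GradientTape() as tape:
        tape.watch(x_hat)
        y_hat = discriminator(x_hat, labels)
    grad = tape.gradient(y_hat, x_hat)                        # dD(x_hat)/dx_hat
    slopes = tf.sqrt(tf.reduce_sum(tf.square(grad), axis=1))  # per-sample gradient norm
    return tf.reduce_mean((slopes - 1.0) ** 2)                # weighted by 10 in the d-loss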