diff --git a/configs/db_new.yaml b/configs/db_new.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8b4c9a08b6c988f47fd0a49addd9e99579c4151d --- /dev/null +++ b/configs/db_new.yaml @@ -0,0 +1,70 @@ +main_config: + max_missing : 120 + db_params: + user: c##mspils + password: cobalt_deviancy + dsn: localhost/XE + # Can also be given as a list + zrxp_folder : ../data/dwd_ens_zrpx/ + sensor_folder : ../data/db_in/ + zrxp_out_folder : ../data/zrxp_out/ + # If True, tries to put all files in the folder into the external forecast table + load_sensor : False + load_zrxp : False + ensemble : True + single : False + dummy : True + #start: 2019-12-01 02:00 + #start: 2023-04-19 03:00 + #start: 2023-11-19 03:00 + start: 2024-09-13 09:00 + #end: !!str 2021-02-06 + # if range is True, will try all values from start to end for predictions + #range: !!bool True + range: !!bool False + zrxp: !!bool True +gauge_configs: + - gauge: hollingstedt + model_folder: ../models_torch/hollingstedt_version_14 + columns: + - 4466,SHum,vwsl + - 4466,SHum,bfwls + - 4466,AT,h.Cmd-2 + - 114069,S,15m.Cmd + - 111111,S,5m.Cmd + - 9520081,S_Tide,1m.Cmd + - 9530010,S_Tide,1m.Cmd + - 112211,Precip,h.Cmd + - 112211,S,5m.Cmd + external_fcst: + - 9530010,S_Tide,1m.Cmd + - 112211,Precip,h.Cmd + - gauge: 114547,S,60m.Cmd + model_folder: ../models_torch/tarp_version_49/ + columns: + - 4466,SHum,vwsl + - 4466,SHum,bfwls + - 4466,AT,h.Cmd + - 114435,S,60m.Cmd + - 114050,S,60m.Cmd + - 114547,S,60m.Cmd + - 114547,Precip,h.Cmd + external_fcst: + - 114547,Precip,h.Cmd + #114547,Precip,h.Cmd : Tarp + - gauge: 114069,Precip,h.Cmd + model_folder: ../models_torch/version_74_treia/ + columns: + - 4466,SHum,vwsl + - 4466,SHum,bfwls + - 4466,AT,h.Cmd + - 114435,S,60m.Cmd + - 114050,S,60m.Cmd + - 114547,S,60m.Cmd + - 114224,S,60m.Cmd + - 114061,S,60m.Cmd + - 114069,Precip,h.Cmd + - 114069,S,15m.Cmd + external_fcst: + - 114069,Precip,h.Cmd + #114069,Precip,h.Cmd : Treia \ No newline at end of file diff --git
a/configs/icon_exp_ensemble.yaml b/configs/icon_exp_ensemble.yaml new file mode 100644 index 0000000000000000000000000000000000000000..27338164b0b4cc002aa2f22c9c15bec9c040bb38 --- /dev/null +++ b/configs/icon_exp_ensemble.yaml @@ -0,0 +1,9 @@ +shape_folder: ../data/meta_other/Gebiete/ +index_folder: ../data/meta_other/ensemble_index/ +source_folder: ../data/ICON_ENSEMBLE/ +target_folder: ../data/ICON_ENSEMBLE_EXPORT/ +mode: ensemble +force_index_calculation: False +log_file: ../data/ICON_ENSEMBLE_EXPORT/icon_export.log +tidy_up: True +debug: False \ No newline at end of file diff --git a/configs/icon_exp_single.yaml b/configs/icon_exp_single.yaml new file mode 100644 index 0000000000000000000000000000000000000000..5441644f63829b0105a9bc68f0ef12f64af10655 --- /dev/null +++ b/configs/icon_exp_single.yaml @@ -0,0 +1,9 @@ +shape_folder: ../data/meta_other/Gebiete/ +index_folder: ../data/meta_other/single_index/ +source_folder: ../data/ICON_D2/ +target_folder: ../data/ICON_D2_EXPORT/ +mode: single +force_index_calculation: False +log_file: ../data/ICON_ENSEMBLE_EXPORT/icon_export.log +tidy_up: True +debug: False \ No newline at end of file diff --git a/notebooks/data/dk.json b/notebooks/data/dk.json new file mode 100644 index 0000000000000000000000000000000000000000..3d80c640b643d3847a108707ec8d9c8eb905bc5b --- /dev/null +++ b/notebooks/data/dk.json @@ -0,0 +1,10071 @@ +{ + "type": "FeatureCollection", + "features": [ + { + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 8.17560707357738, + 55.80147645034364 + ], + [ + 8.175811394924775, + 55.80147796784839 + ], + [ + 8.368306106083875, + 55.80253734910015 + ], + [ + 8.39920861463416, + 55.7901091909284 + ], + [ + 8.42447838826987, + 55.78375297076611 + ], + [ + 8.459721712590886, + 55.77106637546227 + ], + [ + 8.471969027931252, + 55.76941274952962 + ], + [ + 8.490159131276158, + 55.77147978437919 + ], + [ + 8.503026561730286, + 55.7792829105895 + ], + [ + 8.553307732933744, + 
55.811296498605216 + ], + [ + 8.555478140129413, + 55.81349270440006 + ], + [ + 8.555943231512725, + 55.81607657225882 + ], + [ + 8.554237906052201, + 55.81912543998764 + ], + [ + 8.54912194349245, + 55.82545586408289 + ], + [ + 8.548501822773193, + 55.82868558605206 + ], + [ + 8.55361779392601, + 55.831527857785694 + ], + [ + 8.560749141086577, + 55.83411167230695 + ], + [ + 8.569585809065101, + 55.83540355393319 + ], + [ + 8.578732535573415, + 55.83524849887616 + ], + [ + 8.588499385666147, + 55.831476173896185 + ], + [ + 8.598886348943562, + 55.829202417393134 + ], + [ + 8.610875277338248, + 55.827936303139445 + ], + [ + 8.62761844153198, + 55.82979665549971 + ], + [ + 8.64405154841968, + 55.833904933787856 + ], + [ + 8.654593538502695, + 55.83478341441331 + ], + [ + 8.687201370536842, + 55.82995170572608 + ], + [ + 8.757481315770384, + 55.80178801830286 + ], + [ + 8.774586213717082, + 55.7988166042023 + ], + [ + 8.80698733725518, + 55.80455269904055 + ], + [ + 8.823110387699552, + 55.81034049952672 + ], + [ + 8.82295535353463, + 55.81680000457975 + ], + [ + 8.817684358139942, + 55.833749880058846 + ], + [ + 8.828278026543742, + 55.84284492031668 + ], + [ + 8.843470898212058, + 55.87230050114686 + ], + [ + 8.880057814373021, + 55.882584143794276 + ], + [ + 8.929098749185627, + 55.88000033168568 + ], + [ + 8.959381140724814, + 55.86400649390912 + ], + [ + 8.999275340451677, + 55.834731729562215 + ], + [ + 9.013072949743426, + 55.8287889521982 + ], + [ + 9.047334428007561, + 55.830907732000924 + ], + [ + 9.058083123524849, + 55.836617956922545 + ], + [ + 9.070950553214702, + 55.835713584575274 + ], + [ + 9.079787223967422, + 55.8482709664429 + ], + [ + 9.085058218700397, + 55.853541949899 + ], + [ + 9.093894893636694, + 55.859329773001974 + ], + [ + 9.10288658789021, + 55.86320544626469 + ], + [ + 9.12107669118206, + 55.86604763999335 + ], + [ + 9.137974890102475, + 55.873902523732745 + ], + [ + 9.184380325599165, + 55.907104616236445 + ], + [ + 9.211665482605554, 
+ 55.93834303653782 + ], + [ + 9.249389277637226, + 55.94299390050005 + ], + [ + 9.343130320788484, + 55.92547553445804 + ], + [ + 9.374756301344647, + 55.91436514245439 + ], + [ + 9.399922725675944, + 55.90165276715717 + ], + [ + 9.450668982651276, + 55.865789329469074 + ], + [ + 9.458265416105375, + 55.85845124894569 + ], + [ + 9.459660681670151, + 55.853309463079334 + ], + [ + 9.462916296440518, + 55.84610060767762 + ], + [ + 9.468807407800863, + 55.83685050143775 + ], + [ + 9.491131627334669, + 55.82170931107715 + ], + [ + 9.516453079968407, + 55.797576426697766 + ], + [ + 9.530405715603825, + 55.78755118193151 + ], + [ + 9.546218706457406, + 55.78450230435875 + ], + [ + 9.581617059133832, + 55.78682771060045 + ], + [ + 9.600685661506827, + 55.78452809944401 + ], + [ + 9.605491573785457, + 55.77886956315117 + ], + [ + 9.602546012833916, + 55.77266835042036 + ], + [ + 9.598360225006857, + 55.76690646748059 + ], + [ + 9.596344843368806, + 55.76083447556116 + ], + [ + 9.597895136632635, + 55.7533930762072 + ], + [ + 9.608747184175755, + 55.74946561617856 + ], + [ + 9.641665073962033, + 55.74693350975045 + ], + [ + 9.663989289291997, + 55.7429543765599 + ], + [ + 9.675513138155502, + 55.73706332971859 + ], + [ + 9.680629104828782, + 55.72848503775071 + ], + [ + 9.679698927323647, + 55.71964835017899 + ], + [ + 9.675564814682227, + 55.70871880301719 + ], + [ + 9.675520593530397, + 55.70859818519244 + ], + [ + 9.58301842565813, + 55.708889073058614 + ], + [ + 9.559580922573973, + 55.715399451221316 + ], + [ + 9.556407097715837, + 55.711574619789154 + ], + [ + 9.554535349228082, + 55.70880764995218 + ], + [ + 9.553477410079664, + 55.70579661797767 + ], + [ + 9.552744987393648, + 55.70111724754837 + ], + [ + 9.573578319770531, + 55.695461314922625 + ], + [ + 9.645518422359826, + 55.69489163686437 + ], + [ + 9.658539258899422, + 55.69037506577659 + ], + [ + 9.710297071045407, + 55.66014232054865 + ], + [ + 9.720957880166207, + 55.64826082310041 + ], + [ + 
9.728200717344835, + 55.63751862616716 + ], + [ + 9.73829186429071, + 55.62970613758564 + ], + [ + 9.75814863414666, + 55.62665436576682 + ], + [ + 9.85377037914811, + 55.62665436576682 + ], + [ + 9.837412956073978, + 55.61522043392491 + ], + [ + 9.785492384007366, + 55.59194570508788 + ], + [ + 9.757660351929335, + 55.571600652146245 + ], + [ + 9.744151237149831, + 55.56635161972159 + ], + [ + 9.720876498698033, + 55.56460196345832 + ], + [ + 9.7124129562298, + 55.56028879465636 + ], + [ + 9.710622593448813, + 55.55093009758133 + ], + [ + 9.71029707258739, + 55.541571375268184 + ], + [ + 9.70687910073204, + 55.53729887960059 + ], + [ + 9.643890821389626, + 55.52830638676298 + ], + [ + 9.621104363115515, + 55.518947659415055 + ], + [ + 9.607269726567162, + 55.51679107768524 + ], + [ + 9.595225456492976, + 55.51837799076683 + ], + [ + 9.572764518424071, + 55.52391184390417 + ], + [ + 9.559580923492513, + 55.52362700140266 + ], + [ + 9.562185092801702, + 55.51951732988752 + ], + [ + 9.566416862569017, + 55.51679107768524 + ], + [ + 9.551931185564605, + 55.50511301762154 + ], + [ + 9.514903192305688, + 55.49819572119229 + ], + [ + 9.49805748833703, + 55.489447337187244 + ], + [ + 9.515635614402576, + 55.48314038439945 + ], + [ + 9.534434439495858, + 55.483832079375595 + ], + [ + 9.5836694664389, + 55.49115631008924 + ], + [ + 9.592784051191279, + 55.4942894696549 + ], + [ + 9.602386913984125, + 55.49579497651419 + ], + [ + 9.660166863421669, + 55.4779727281527 + ], + [ + 9.664235874172954, + 55.466701579333595 + ], + [ + 9.659922722676862, + 55.45302969827726 + ], + [ + 9.648936394034388, + 55.4410667994202 + ], + [ + 9.635264520273964, + 55.43573641062711 + ], + [ + 9.601410350913916, + 55.43183013162742 + ], + [ + 9.586761915082118, + 55.42743561501192 + ], + [ + 9.592051628105969, + 55.41632720497663 + ], + [ + 9.604340041430191, + 55.40119051461637 + ], + [ + 9.607920768390393, + 55.393255919491075 + ], + [ + 9.60515384131264, + 55.38202544952899 + ], + [ + 
9.599294468407335, + 55.37335848335683 + ], + [ + 9.600840689547587, + 55.36591215908493 + ], + [ + 9.621104364564076, + 55.358547287253366 + ], + [ + 9.63347415416924, + 55.351996150783876 + ], + [ + 9.645274285828965, + 55.34271882718258 + ], + [ + 9.64828535143677, + 55.3329124950804 + ], + [ + 9.634613476524187, + 55.32501861614474 + ], + [ + 9.634613476355934, + 55.31757232507297 + ], + [ + 9.644867384196797, + 55.310939848418954 + ], + [ + 9.681895378775444, + 55.27464427024593 + ], + [ + 9.694183789662452, + 55.27163320086574 + ], + [ + 9.706065300480738, + 55.265082103879735 + ], + [ + 9.71029707169057, + 55.248683994446814 + ], + [ + 9.70655358250278, + 55.23700593315368 + ], + [ + 9.699473504757053, + 55.22980378727489 + ], + [ + 9.692881707753767, + 55.22101472524301 + ], + [ + 9.689789258701413, + 55.204901430334786 + ], + [ + 9.684418165801725, + 55.1975772258461 + ], + [ + 9.671560091360043, + 55.19245025412797 + ], + [ + 9.655772332527341, + 55.18943920149645 + ], + [ + 9.642100457188864, + 55.18842194432937 + ], + [ + 9.583343946606071, + 55.1946475355221 + ], + [ + 9.569834831845437, + 55.192531641077665 + ], + [ + 9.561534049384047, + 55.187201231334384 + ], + [ + 9.554942253758345, + 55.18056874898944 + ], + [ + 9.545909050111222, + 55.17413971738564 + ], + [ + 9.521820509343181, + 55.165432037289875 + ], + [ + 9.507172071189414, + 55.16193268637121 + ], + [ + 9.494395378724967, + 55.16054921755007 + ], + [ + 9.486338738265216, + 55.156236073328046 + ], + [ + 9.486338738048264, + 55.14740631760494 + ], + [ + 9.494639519095053, + 55.140082099191496 + ], + [ + 9.511892123095185, + 55.140082099191496 + ], + [ + 9.51189212281142, + 55.13320546863723 + ], + [ + 9.482432487933002, + 55.13377513115708 + ], + [ + 9.46876061325089, + 55.13178132015613 + ], + [ + 9.456553582031837, + 55.12571849240018 + ], + [ + 9.466644726946955, + 55.12140534033129 + ], + [ + 9.479177279933268, + 55.11969635516042 + ], + [ + 9.508067254074023, + 55.119533596927006 + ], + 
[ + 9.521332227373646, + 55.115871490677044 + ], + [ + 9.535817904870262, + 55.1068789713773 + ], + [ + 9.55958092479999, + 55.08539459550287 + ], + [ + 9.533702018955712, + 55.065822657444464 + ], + [ + 9.516612174926012, + 55.0565453140707 + ], + [ + 9.498057487958585, + 55.05068593947829 + ], + [ + 9.452403191174048, + 55.045355538197924 + ], + [ + 9.43433678501543, + 55.037543036194315 + ], + [ + 9.442881707086386, + 55.02338288109222 + ], + [ + 9.455251498001637, + 55.023179429020075 + ], + [ + 9.511892122979443, + 55.0363630227412 + ], + [ + 9.537364128999789, + 55.03538645999635 + ], + [ + 9.545909049979404, + 55.0363630227412 + ], + [ + 9.553233268909093, + 55.03961822986234 + ], + [ + 9.5602319668574, + 55.04441966221794 + ], + [ + 9.568532748091803, + 55.04877350614511 + ], + [ + 9.579925976958215, + 55.05068593947829 + ], + [ + 9.601573112806816, + 55.045396223596086 + ], + [ + 9.641856315981128, + 55.019720769765065 + ], + [ + 9.662608269005544, + 55.00971100506752 + ], + [ + 9.717539910000141, + 55.00116608300078 + ], + [ + 9.730967634208934, + 54.995428657673976 + ], + [ + 9.738617384347835, + 54.98086172527219 + ], + [ + 9.760752793745374, + 54.904974600182385 + ], + [ + 9.763194214662695, + 54.900458169096815 + ], + [ + 9.761566598287489, + 54.89740627139821 + ], + [ + 9.751963736498542, + 54.89301176753617 + ], + [ + 9.737071160338674, + 54.892767649169954 + ], + [ + 9.722992378965934, + 54.89765039592069 + ], + [ + 9.71143638424735, + 54.89858620959221 + ], + [ + 9.703461141444187, + 54.886175940011945 + ], + [ + 9.715993693787642, + 54.88377522813949 + ], + [ + 9.727875205552087, + 54.879462098791386 + ], + [ + 9.737803575272267, + 54.873521143106785 + ], + [ + 9.744476750329023, + 54.86627828021659 + ], + [ + 9.748301616468707, + 54.857855381702095 + ], + [ + 9.750254752226777, + 54.84625883416297 + ], + [ + 9.745941600347193, + 54.83600492963127 + ], + [ + 9.731211778232606, + 54.83152895546649 + ], + [ + 9.719574418590241, + 54.83584223436277 
+ ], + [ + 9.70533286685405, + 54.85455948677131 + ], + [ + 9.69662518627868, + 54.85887266866664 + ], + [ + 9.65503991521983, + 54.85708248889256 + ], + [ + 9.642100447333359, + 54.85887266866664 + ], + [ + 9.623301639062129, + 54.865301950477324 + ], + [ + 9.624685079444063, + 54.86928929846198 + ], + [ + 9.632823112793242, + 54.87311432242379 + ], + [ + 9.634613488047739, + 54.879340048554255 + ], + [ + 9.616953961993252, + 54.89423236422661 + ], + [ + 9.610362173122594, + 54.90322497857955 + ], + [ + 9.617523624347543, + 54.90729389169413 + ], + [ + 9.644216333912572, + 54.9132753574955 + ], + [ + 9.638845235807219, + 54.9247499154399 + ], + [ + 9.615000849914543, + 54.932684673309545 + ], + [ + 9.586761924293263, + 54.92772056113519 + ], + [ + 9.581228056658931, + 54.92177963571003 + ], + [ + 9.568044470857693, + 54.90143468533682 + ], + [ + 9.556813992140066, + 54.890326165465375 + ], + [ + 9.550547716384926, + 54.88271711947688 + ], + [ + 9.545909060946043, + 54.879340048554255 + ], + [ + 9.538340695791549, + 54.87791581726283 + ], + [ + 9.522146041443072, + 54.87929936515986 + ], + [ + 9.515147339700981, + 54.87592203909655 + ], + [ + 9.498057491648527, + 54.86432530809857 + ], + [ + 9.456390811646466, + 54.843817331229346 + ], + [ + 9.442881700433123, + 54.83152895546649 + ], + [ + 9.43628990133417, + 54.81045145462693 + ], + [ + 9.437503025229184, + 54.81041110414442 + ], + [ + 9.436922237825144, + 54.81014390156533 + ], + [ + 9.422142786558886, + 54.8072501319129 + ], + [ + 9.405192909137645, + 54.80838691919905 + ], + [ + 9.385039094237555, + 54.819393910814675 + ], + [ + 9.366952354960588, + 54.81701694497835 + ], + [ + 9.35599694771287, + 54.811332495311206 + ], + [ + 9.341734265085929, + 54.809058891495006 + ], + [ + 9.332019086123955, + 54.803219327357674 + ], + [ + 9.317032927368201, + 54.80161745087352 + ], + [ + 9.24406580607896, + 54.80177249106897 + ], + [ + 9.22639245957918, + 54.805906520628426 + ], + [ + 9.219054415111097, + 
54.81779221185391 + ], + [ + 9.216057170714539, + 54.831744712344474 + ], + [ + 9.211509651938867, + 54.84182169749087 + ], + [ + 9.194766481215852, + 54.850399897874475 + ], + [ + 8.982686407978816, + 54.87933877719802 + ], + [ + 8.904138224106939, + 54.89794220143914 + ], + [ + 8.8242464522875, + 54.905900278812105 + ], + [ + 8.800888718498516, + 54.90383340251752 + ], + [ + 8.73277916991628, + 54.88905378418294 + ], + [ + 8.69588221081347, + 54.88998415054627 + ], + [ + 8.660775938016705, + 54.896311211608904 + ], + [ + 8.668793158135317, + 54.913519503092054 + ], + [ + 8.661387555528917, + 54.92031470113451 + ], + [ + 8.67481530286424, + 54.94794346653396 + ], + [ + 8.66635176611271, + 54.97333415020506 + ], + [ + 8.650401248308818, + 54.997992397502145 + ], + [ + 8.640961134079038, + 55.02338288109222 + ], + [ + 8.64478600409789, + 55.055731513350935 + ], + [ + 8.656504753869235, + 55.08250559919663 + ], + [ + 8.670909050192723, + 55.10712311664887 + ], + [ + 8.681895378827914, + 55.13320546863723 + ], + [ + 8.575531446003332, + 55.144110419045376 + ], + [ + 8.563649935800209, + 55.143377994220394 + ], + [ + 8.5599064458335, + 55.13507721368162 + ], + [ + 8.55738365988096, + 55.096380925339744 + ], + [ + 8.551117383700921, + 55.09137603982726 + ], + [ + 8.541351759227348, + 55.0902774141742 + ], + [ + 8.528575066124937, + 55.081976631746606 + ], + [ + 8.514821810967796, + 55.07078685054798 + ], + [ + 8.499359570988963, + 55.06557851784406 + ], + [ + 8.48389733210726, + 55.06761302450786 + ], + [ + 8.470225456956383, + 55.078599350384444 + ], + [ + 8.458832226774858, + 55.10346100182525 + ], + [ + 8.46021569098076, + 55.12738678572781 + ], + [ + 8.46648196699231, + 55.15159739789119 + ], + [ + 8.470225456362806, + 55.17755767504625 + ], + [ + 8.488942905351012, + 55.197170319917745 + ], + [ + 8.530528190421357, + 55.200751035931305 + ], + [ + 8.572601758763724, + 55.19367096272045 + ], + [ + 8.593760613317961, + 55.18097565740169 + ], + [ + 8.580332878739016, + 
55.1789411443792 + ], + [ + 8.56950931161871, + 55.17377351359249 + ], + [ + 8.552093945752262, + 55.16054921755007 + ], + [ + 8.55518639366981, + 55.149359437402225 + ], + [ + 8.614756706687587, + 55.14435455467931 + ], + [ + 8.669281445794743, + 55.13690826717837 + ], + [ + 8.689463737645074, + 55.14158762713324 + ], + [ + 8.687185092204567, + 55.160589913803065 + ], + [ + 8.668793165548237, + 55.1946475355221 + ], + [ + 8.65593509262145, + 55.23749421652995 + ], + [ + 8.648448112966872, + 55.28025950654425 + ], + [ + 8.650401238383944, + 55.291734122265 + ], + [ + 8.659353061526872, + 55.30581290021579 + ], + [ + 8.661387565708639, + 55.31443919101 + ], + [ + 8.659678581507512, + 55.32721588025758 + ], + [ + 8.648448113482958, + 55.352280998613985 + ], + [ + 8.63941490970425, + 55.39740631294858 + ], + [ + 8.632090692441972, + 55.4187686417526 + ], + [ + 8.617686393771129, + 55.43793365785988 + ], + [ + 8.592539910508489, + 55.44928620699118 + ], + [ + 8.556651239028431, + 55.45429109719838 + ], + [ + 8.490000847751606, + 55.455308345457375 + ], + [ + 8.441905144419705, + 55.46393464287095 + ], + [ + 8.401133658984431, + 55.48566313872918 + ], + [ + 8.31039472791438, + 55.562689532961016 + ], + [ + 8.311208529085114, + 55.56976958503396 + ], + [ + 8.33236738526004, + 55.57200757180966 + ], + [ + 8.332367385110652, + 55.57827384569545 + ], + [ + 8.31316165560184, + 55.582709060523605 + ], + [ + 8.290212436488272, + 55.58380769093401 + ], + [ + 8.267832878595701, + 55.580511780240855 + ], + [ + 8.250498895247686, + 55.57200757180966 + ], + [ + 8.239024284135542, + 55.55792876563688 + ], + [ + 8.24203535163392, + 55.54852935876459 + ], + [ + 8.271739128343198, + 55.53046294863682 + ], + [ + 8.26246178524464, + 55.52920156848994 + ], + [ + 8.25635826872602, + 55.52680084408582 + ], + [ + 8.243662956628619, + 55.51679107768524 + ], + [ + 8.2667749356007, + 55.51439036130193 + ], + [ + 8.293955924990433, + 55.504339910862626 + ], + [ + 8.316742383518553, + 
55.48940663616638 + ], + [ + 8.326182486754506, + 55.472113330334665 + ], + [ + 8.313324415145892, + 55.46930573307157 + ], + [ + 8.193614128162096, + 55.52594634394013 + ], + [ + 8.16920006603527, + 55.53408437750778 + ], + [ + 8.11060631515597, + 55.5402285707321 + ], + [ + 8.095225456630736, + 55.549058329623165 + ], + [ + 8.094004754717746, + 55.563910233445114 + ], + [ + 8.150238476726635, + 55.64923736805667 + ], + [ + 8.16911868754386, + 55.687933683190366 + ], + [ + 8.18165123761594, + 55.72907135049285 + ], + [ + 8.183360224078854, + 55.76959872976427 + ], + [ + 8.17560707357738, + 55.80147645034364 + ] + ] + ], + [ + [ + [ + 10.450205936170002, + 54.89980716115108 + ], + [ + 10.466644718324003, + 54.89057028059205 + ], + [ + 10.485687705388898, + 54.88369385939922 + ], + [ + 10.499196813618319, + 54.874579198919406 + ], + [ + 10.498708519474526, + 54.85887266866664 + ], + [ + 10.508148639101282, + 54.85492590627374 + ], + [ + 10.518565299877851, + 54.851996159604916 + ], + [ + 10.440114787779645, + 54.841498203481926 + ], + [ + 10.418630399413951, + 54.827826174591685 + ], + [ + 10.406260620158449, + 54.82469318364205 + ], + [ + 10.383474148793761, + 54.830755855204266 + ], + [ + 10.211436397379803, + 54.93935785463954 + ], + [ + 10.195160349701462, + 54.96185941501375 + ], + [ + 10.18921959670927, + 54.97748443858516 + ], + [ + 10.203868030044573, + 54.96808496988032 + ], + [ + 10.288747587810368, + 54.940741229231854 + ], + [ + 10.313731325997992, + 54.92772056113519 + ], + [ + 10.337250187560384, + 54.90591045113062 + ], + [ + 10.34961998817365, + 54.89671458701082 + ], + [ + 10.364268423404269, + 54.89301176753617 + ], + [ + 10.379405142092576, + 54.89305245396206 + ], + [ + 10.391368043222768, + 54.8954532868832 + ], + [ + 10.395843943210103, + 54.903469110826904 + ], + [ + 10.38884523544047, + 54.92031470113451 + ], + [ + 10.403086773602622, + 54.90839251367642 + ], + [ + 10.41521243272885, + 54.87848538132226 + ], + [ + 10.426036014434958, + 
54.872504093084274 + ], + [ + 10.446787969577972, + 54.87238204745899 + ], + [ + 10.46094811694495, + 54.87449783424342 + ], + [ + 10.46363365701385, + 54.882473014767264 + ], + [ + 10.450205936170002, + 54.89980716115108 + ] + ] + ], + [ + [ + [ + 10.011403835368746, + 54.975531237746765 + ], + [ + 10.025889521517511, + 54.959784277055974 + ], + [ + 10.065114769596327, + 54.890814396042586 + ], + [ + 10.064463727131873, + 54.88100807546556 + ], + [ + 10.050466333670308, + 54.87885153057662 + ], + [ + 10.021983280491938, + 54.879340048554255 + ], + [ + 10.00709068967912, + 54.87482329316303 + ], + [ + 9.98755943924883, + 54.86591215595805 + ], + [ + 9.968028192634538, + 54.85952384968041 + ], + [ + 9.953623907000386, + 54.86261643474964 + ], + [ + 9.937185102720004, + 54.870917188439385 + ], + [ + 9.891123905886525, + 54.87929936515986 + ], + [ + 9.874278198575254, + 54.886175940011945 + ], + [ + 9.874278189479712, + 54.89301176753617 + ], + [ + 9.900889525744638, + 54.90070229767064 + ], + [ + 9.932302280044915, + 54.892808335541815 + ], + [ + 9.963877803202015, + 54.87995030054906 + ], + [ + 9.991547081000085, + 54.872504093084274 + ], + [ + 9.991547082457021, + 54.879340048554255 + ], + [ + 9.943695505917669, + 54.90363186583856 + ], + [ + 9.898285343276806, + 54.91299052137614 + ], + [ + 9.887950058169896, + 54.913519503092054 + ], + [ + 9.877940289401584, + 54.91132220440013 + ], + [ + 9.863536002513195, + 54.90306222392806 + ], + [ + 9.85377038953248, + 54.89980716115108 + ], + [ + 9.814707884250783, + 54.9010684911391 + ], + [ + 9.780935092277268, + 54.91681550039663 + ], + [ + 9.761241085303888, + 54.94306065453227 + ], + [ + 9.764414903532382, + 54.975531237746765 + ], + [ + 9.75814862753651, + 54.975531237746765 + ], + [ + 9.758148636960843, + 54.98175693929783 + ], + [ + 9.78028404428746, + 54.972845700107776 + ], + [ + 9.804047068829648, + 54.957180053498845 + ], + [ + 9.827403179286081, + 54.94546118726518 + ], + [ + 9.847504109503829, + 
54.948187658239775 + ], + [ + 9.843272324980585, + 54.95172752061992 + ], + [ + 9.836680536522799, + 54.95856356253031 + ], + [ + 9.832774283830954, + 54.96515532676951 + ], + [ + 9.836680530222894, + 54.96808496988032 + ], + [ + 9.84213301051801, + 54.9702823124562 + ], + [ + 9.835215699849782, + 54.975165213661896 + ], + [ + 9.793711782987808, + 54.9921735454269 + ], + [ + 9.786794467000117, + 55.00014883000086 + ], + [ + 9.799082879005613, + 55.00971100506752 + ], + [ + 9.799082879020935, + 55.01654694225437 + ], + [ + 9.720550976998437, + 55.012274480979904 + ], + [ + 9.706879101955433, + 55.013128972450346 + ], + [ + 9.696950716992406, + 55.01752350490582 + ], + [ + 9.684825066013111, + 55.0185407571608 + ], + [ + 9.6741642589505, + 55.02106354338773 + ], + [ + 9.66879316492514, + 55.03021881607416 + ], + [ + 9.672373894091065, + 55.034654040123684 + ], + [ + 9.691254101941746, + 55.042792058281215 + ], + [ + 9.696625195957703, + 55.05068593947829 + ], + [ + 9.678233268870711, + 55.05019765640373 + ], + [ + 9.661631706892349, + 55.04653554766841 + ], + [ + 9.645518424958007, + 55.044867254478994 + ], + [ + 9.628428581958014, + 55.05068593947829 + ], + [ + 9.628428581901334, + 55.058050846775586 + ], + [ + 9.716644726875185, + 55.08222076846654 + ], + [ + 9.751963737795956, + 55.08539459550287 + ], + [ + 9.793467644098452, + 55.080755928198464 + ], + [ + 9.853037956996921, + 55.04010651196177 + ], + [ + 9.884776237923482, + 55.03021881607416 + ], + [ + 9.929047071091386, + 55.02619049809902 + ], + [ + 9.960948112973046, + 55.01504140767561 + ], + [ + 9.986582881120201, + 54.99774813938632 + ], + [ + 10.011403835368746, + 54.975531237746765 + ] + ] + ], + [ + [ + [ + 10.854991081902677, + 55.0438500009283 + ], + [ + 10.861989780056026, + 55.07208893461515 + ], + [ + 10.895518425158832, + 55.12152741273652 + ], + [ + 10.934255405235449, + 55.159654041563364 + ], + [ + 10.956716341899812, + 55.1537132819106 + ], + [ + 10.908946159915544, + 55.03021881607416 + ], + 
[ + 10.889903177323907, + 54.99738175282415 + ], + [ + 10.837738479544045, + 54.9372419571147 + ], + [ + 10.741058781045634, + 54.75238668564687 + ], + [ + 10.731293165941244, + 54.74005769456112 + ], + [ + 10.717539896946489, + 54.73578507631932 + ], + [ + 10.693858268610699, + 54.73529693761481 + ], + [ + 10.681895392013429, + 54.74481857368004 + ], + [ + 10.656423360525931, + 54.80365617628867 + ], + [ + 10.645518418570633, + 54.814601882421414 + ], + [ + 10.632090693905937, + 54.82343174083612 + ], + [ + 10.601084833774252, + 54.8383243060986 + ], + [ + 10.625336130705339, + 54.83930081076209 + ], + [ + 10.64087976293952, + 54.84320719887792 + ], + [ + 10.653493696091447, + 54.85163006371646 + ], + [ + 10.669281436506099, + 54.86627828021659 + ], + [ + 10.678558779577445, + 54.88304259292659 + ], + [ + 10.68441815483303, + 54.88784402887487 + ], + [ + 10.69385826219115, + 54.8827578036266 + ], + [ + 10.698903828140368, + 54.88117081060754 + ], + [ + 10.724457235227463, + 54.886175940011945 + ], + [ + 10.722178586320611, + 54.89606359231655 + ], + [ + 10.717784039243305, + 54.90729389169413 + ], + [ + 10.708994986718787, + 54.90298084665635 + ], + [ + 10.700368689769318, + 54.901556750231435 + ], + [ + 10.691905142720836, + 54.90298084665635 + ], + [ + 10.682953310278243, + 54.90729389169413 + ], + [ + 10.697032085607441, + 54.93134173641795 + ], + [ + 10.718272330241486, + 54.9491641103546 + ], + [ + 10.774099162127976, + 54.98655841413487 + ], + [ + 10.798187704865208, + 54.9965681991756 + ], + [ + 10.854991081902677, + 55.0438500009283 + ] + ] + ], + [ + [ + [ + 8.476410352019505, + 55.34544505427165 + ], + [ + 8.451670769020973, + 55.33974844029284 + ], + [ + 8.425303582488972, + 55.35553620687327 + ], + [ + 8.402354362199478, + 55.38003163172106 + ], + [ + 8.370941602071719, + 55.42743561501192 + ], + [ + 8.364268424786678, + 55.44904205598535 + ], + [ + 8.374359571322282, + 55.46328360054461 + ], + [ + 8.408213736896347, + 55.46841053349749 + ], + [ + 
8.411957225773904, + 55.43870675577208 + ], + [ + 8.428558788624667, + 55.43353911071081 + ], + [ + 8.44890384071442, + 55.43553292701415 + ], + [ + 8.463389519072525, + 55.42743561501192 + ], + [ + 8.463063997781898, + 55.41571686095161 + ], + [ + 8.450205924979855, + 55.39337799671668 + ], + [ + 8.45289147117176, + 55.38336821940135 + ], + [ + 8.46045983159376, + 55.37238189731395 + ], + [ + 8.46745853142103, + 55.35370516487879 + ], + [ + 8.476410352019505, + 55.34544505427165 + ] + ] + ], + [ + [ + [ + 10.66627037913914, + 55.064276435556344 + ], + [ + 10.621429884138543, + 55.064276435556344 + ], + [ + 10.604746940990548, + 55.06256744989268 + ], + [ + 10.586110872889401, + 55.05752187575211 + ], + [ + 10.568858268830168, + 55.04987213508023 + ], + [ + 10.556407096996702, + 55.04010651196177 + ], + [ + 10.538259310962179, + 55.029730535570685 + ], + [ + 10.516856316010402, + 55.031317450117264 + ], + [ + 10.477549674906069, + 55.0438500009283 + ], + [ + 10.410655143950128, + 55.04694244942693 + ], + [ + 10.388845247954656, + 55.05068593947829 + ], + [ + 10.37916100379613, + 55.05451080665386 + ], + [ + 10.371348504047862, + 55.05902741155003 + ], + [ + 10.36255944116261, + 55.06273021187225 + ], + [ + 10.350922071135017, + 55.064276435556344 + ], + [ + 10.325938346829322, + 55.06289296902591 + ], + [ + 10.31275475394861, + 55.06366608240427 + ], + [ + 10.234222852047766, + 55.09223053655587 + ], + [ + 10.20525149839707, + 55.09369538464015 + ], + [ + 10.197601758649801, + 55.08608632590246 + ], + [ + 10.197032097133034, + 55.064276435556344 + ], + [ + 10.181895379145494, + 55.070868232704555 + ], + [ + 10.158376497826332, + 55.08665598995992 + ], + [ + 10.14177493604731, + 55.09223053655587 + ], + [ + 10.121267122585314, + 55.09247467211241 + ], + [ + 10.083506706610983, + 55.086981507396715 + ], + [ + 10.06592858204699, + 55.09223053655587 + ], + [ + 10.14258873776995, + 55.125799869296046 + ], + [ + 10.156016472101667, + 55.140082099191496 + ], + [ + 
10.1386824883511, + 55.154282949122766 + ], + [ + 10.126149936039596, + 55.17064036746492 + ], + [ + 10.112966342065567, + 55.18378327077089 + ], + [ + 10.094004754197709, + 55.18842194432937 + ], + [ + 10.078949415716782, + 55.18593985446082 + ], + [ + 10.067556185548955, + 55.18227773066841 + ], + [ + 10.055837436406074, + 55.17975495480498 + ], + [ + 10.039317254371431, + 55.18097565740169 + ], + [ + 10.016612174423653, + 55.19428131015439 + ], + [ + 10.003591342269122, + 55.197333078199385 + ], + [ + 9.991547071195688, + 55.18842194432937 + ], + [ + 9.994476758325675, + 55.17279693396882 + ], + [ + 10.029144726584947, + 55.14240142906952 + ], + [ + 10.025645379033746, + 55.12571849240018 + ], + [ + 10.020681185789915, + 55.12445709550095 + ], + [ + 9.985524935862239, + 55.12775299535552 + ], + [ + 9.982920768663558, + 55.130601299983184 + ], + [ + 9.984385613312474, + 55.13467031872778 + ], + [ + 9.984141472099946, + 55.140082099191496 + ], + [ + 9.986989780308171, + 55.139349680675494 + ], + [ + 9.989756706766967, + 55.143377994220394 + ], + [ + 9.988780144385869, + 55.15184154359939 + ], + [ + 9.974294467366468, + 55.174017649372324 + ], + [ + 9.97201582060627, + 55.18471913830127 + ], + [ + 9.974294467524063, + 55.19525788624932 + ], + [ + 9.980723503692467, + 55.204901430334786 + ], + [ + 9.979502799841496, + 55.21283600311051 + ], + [ + 9.96412194133203, + 55.21816641196038 + ], + [ + 9.924001498158022, + 55.22394440489207 + ], + [ + 9.911387565608264, + 55.228501690301705 + ], + [ + 9.900157096554675, + 55.23411692365382 + ], + [ + 9.89535566472633, + 55.23899973871322 + ], + [ + 9.893239780248472, + 55.24445221898316 + ], + [ + 9.883962435326753, + 55.253485410909555 + ], + [ + 9.88168378977043, + 55.25950755124045 + ], + [ + 9.883962436303584, + 55.26561107364629 + ], + [ + 9.893239779770504, + 55.27464427024593 + ], + [ + 9.895355664962064, + 55.28025950654425 + ], + [ + 9.89323977969637, + 55.29124583135807 + ], + [ + 9.88396243549658, + 
55.308783263959086 + ], + [ + 9.881683789667559, + 55.31443919101 + ], + [ + 9.884776237233998, + 55.348863013818445 + ], + [ + 9.87452233206857, + 55.353338934821686 + ], + [ + 9.858897331987555, + 55.35407135584959 + ], + [ + 9.829844597548924, + 55.352280998613985 + ], + [ + 9.816579623363888, + 55.35521068339016 + ], + [ + 9.80241946702246, + 55.36237213727084 + ], + [ + 9.778086783780262, + 55.380275768233055 + ], + [ + 9.7949324871601, + 55.38385650184981 + ], + [ + 9.813649937008709, + 55.3851586081649 + ], + [ + 9.830088736929644, + 55.38890207211294 + ], + [ + 9.840098503195733, + 55.40009185432919 + ], + [ + 9.78549238492329, + 55.41376375015809 + ], + [ + 9.793711785483225, + 55.41278718283455 + ], + [ + 9.819590691926528, + 55.41376375015809 + ], + [ + 9.809255405561906, + 55.425482495772144 + ], + [ + 9.791026236892481, + 55.43183013162742 + ], + [ + 9.75131269645672, + 55.43488190853541 + ], + [ + 9.73731529931474, + 55.438177793681234 + ], + [ + 9.703461133117822, + 55.462144262260786 + ], + [ + 9.7034611327263, + 55.46841053349749 + ], + [ + 9.73023522162668, + 55.46149322646713 + ], + [ + 9.758474156547944, + 55.447495853743064 + ], + [ + 9.78630618710932, + 55.43720125239728 + ], + [ + 9.812185092034932, + 55.4410667994202 + ], + [ + 9.713389518897008, + 55.49103424574749 + ], + [ + 9.676280143179644, + 55.49567290899384 + ], + [ + 9.683929884991167, + 55.50604890707477 + ], + [ + 9.69353274851258, + 55.51019929223739 + ], + [ + 9.70476321603026, + 55.51239654021229 + ], + [ + 9.717784049562201, + 55.51679107768524 + ], + [ + 9.755137566103793, + 55.54413483425302 + ], + [ + 9.809092645416975, + 55.55027905602217 + ], + [ + 9.827403191077227, + 55.548407294925084 + ], + [ + 9.830821161406048, + 55.54311759685677 + ], + [ + 9.83130943938249, + 55.534735399602695 + ], + [ + 9.840098502448265, + 55.52362700140266 + ], + [ + 9.88819420627223, + 55.508693743316286 + ], + [ + 9.940277540457153, + 55.51923249442325 + ], + [ + 10.039317254046738, + 
55.55776601854763 + ], + [ + 10.168630406190562, + 55.58226147378536 + ], + [ + 10.212657096884394, + 55.60065338566679 + ], + [ + 10.237315300582194, + 55.60553620669113 + ], + [ + 10.264170767786522, + 55.6066348190875 + ], + [ + 10.272146030584128, + 55.60553620669113 + ], + [ + 10.272797071676962, + 55.60252513975437 + ], + [ + 10.279063347533423, + 55.58832429210833 + ], + [ + 10.279551630326392, + 55.585109783190866 + ], + [ + 10.293630405405752, + 55.588080149640064 + ], + [ + 10.297129755584754, + 55.59564852311406 + ], + [ + 10.293793163996485, + 55.61611561553079 + ], + [ + 10.307627800466742, + 55.61823151732566 + ], + [ + 10.338389519013925, + 55.610093492157546 + ], + [ + 10.407399936317791, + 55.58295319559485 + ], + [ + 10.423024937576159, + 55.57200757180966 + ], + [ + 10.435394727884422, + 55.55971915298427 + ], + [ + 10.44418379090594, + 55.55316804622046 + ], + [ + 10.453623895510075, + 55.55027905602217 + ], + [ + 10.471934441874337, + 55.54901765483858 + ], + [ + 10.488536002447987, + 55.54547758855931 + ], + [ + 10.518809439476504, + 55.533433317923496 + ], + [ + 10.502452018864341, + 55.537176823476415 + ], + [ + 10.486827018864547, + 55.537176823476415 + ], + [ + 10.475271029151596, + 55.5319684824506 + ], + [ + 10.470713737518661, + 55.52020904657752 + ], + [ + 10.474375845616782, + 55.515814504422785 + ], + [ + 10.489105663715645, + 55.509100638554116 + ], + [ + 10.49122155113084, + 55.50311922071705 + ], + [ + 10.486013217039657, + 55.49477773644582 + ], + [ + 10.477793817074064, + 55.492499103097465 + ], + [ + 10.46778404890904, + 55.49213286769851 + ], + [ + 10.457041863371053, + 55.489447337187244 + ], + [ + 10.419606966568162, + 55.45880767910378 + ], + [ + 10.436045769024572, + 55.442124742277294 + ], + [ + 10.4715275405342, + 55.443060620027985 + ], + [ + 10.491221551701535, + 55.46527743015443 + ], + [ + 10.502452018936758, + 55.46356842628779 + ], + [ + 10.566905142637065, + 55.4826520697724 + ], + [ + 10.604746941376291, + 
55.489447337187244 + ], + [ + 10.609141471613277, + 55.492580466696914 + ], + [ + 10.60718834784971, + 55.499416417451805 + ], + [ + 10.602305535499237, + 55.50625235155454 + ], + [ + 10.597666862828552, + 55.50934479209061 + ], + [ + 10.588715039622256, + 55.51068756279093 + ], + [ + 10.579112174824804, + 55.514105534045584 + ], + [ + 10.571055536306217, + 55.51870353257262 + ], + [ + 10.566905142333656, + 55.52362700140266 + ], + [ + 10.569834833695124, + 55.53241610190702 + ], + [ + 10.58228600437224, + 55.52912018814671 + ], + [ + 10.595550977079057, + 55.521389065879134 + ], + [ + 10.601084831522392, + 55.51679107768524 + ], + [ + 10.613780144582275, + 55.527899487467856 + ], + [ + 10.6105249359641, + 55.54364655160074 + ], + [ + 10.60084069266351, + 55.561224695486196 + ], + [ + 10.594248895412154, + 55.57827384569545 + ], + [ + 10.599131708121279, + 55.57367586045816 + ], + [ + 10.614593946762222, + 55.56460196345832 + ], + [ + 10.615000847911244, + 55.58144766810706 + ], + [ + 10.608409050495453, + 55.606268627494764 + ], + [ + 10.608571811966439, + 55.61298249971747 + ], + [ + 10.627614780144961, + 55.613592842604334 + ], + [ + 10.65691165392684, + 55.59487538114644 + ], + [ + 10.704112176546262, + 55.55027905602217 + ], + [ + 10.706065298257984, + 55.543890672821085 + ], + [ + 10.707041862042445, + 55.534979548455794 + ], + [ + 10.709239128368214, + 55.52704498204273 + ], + [ + 10.71436608031039, + 55.52362700140266 + ], + [ + 10.716807489745976, + 55.521429774212734 + ], + [ + 10.738129101826269, + 55.50934479209061 + ], + [ + 10.744476758413478, + 55.49555084155718 + ], + [ + 10.742360873809709, + 55.48749421689665 + ], + [ + 10.73308353070581, + 55.48187898076292 + ], + [ + 10.703949416511954, + 55.47113680267958 + ], + [ + 10.657481316302636, + 55.45970287035339 + ], + [ + 10.588389520315193, + 55.46336499066999 + ], + [ + 10.571299675682166, + 55.461249098621295 + ], + [ + 10.559418165934762, + 55.455308345457375 + ], + [ + 10.554047071211933, + 
55.44257233537236 + ], + [ + 10.563324414964208, + 55.43622467659879 + ], + [ + 10.579437696984174, + 55.43585847699492 + ], + [ + 10.59424889403775, + 55.4410667994202 + ], + [ + 10.58961022030816, + 55.446234423120316 + ], + [ + 10.588552281130914, + 55.4494082826193 + ], + [ + 10.595957878921256, + 55.45966217612125 + ], + [ + 10.61573327048357, + 55.456854575508444 + ], + [ + 10.682953320357562, + 55.44790272889342 + ], + [ + 10.697927280406107, + 55.44025300148607 + ], + [ + 10.796641473755175, + 55.358547287253366 + ], + [ + 10.803070509728627, + 55.34975821298953 + ], + [ + 10.828379754095794, + 55.30658600604835 + ], + [ + 10.834320509038998, + 55.290269273426425 + ], + [ + 10.826914910038992, + 55.290269273426425 + ], + [ + 10.81511477917325, + 55.30084869093182 + ], + [ + 10.79688561251295, + 55.30768463164912 + ], + [ + 10.78028404917265, + 55.30491770089634 + ], + [ + 10.772959831725215, + 55.28709544197238 + ], + [ + 10.778656446705163, + 55.274847730764975 + ], + [ + 10.805349155799307, + 55.25043366978602 + ], + [ + 10.81332441511044, + 55.23558177421286 + ], + [ + 10.815196160178393, + 55.21405670996024 + ], + [ + 10.812673372377803, + 55.19428131015439 + ], + [ + 10.806000195150439, + 55.1773949146429 + ], + [ + 10.790782097261758, + 55.15692780488745 + ], + [ + 10.78785240970201, + 55.14948150870935 + ], + [ + 10.786631706615385, + 55.129461976750285 + ], + [ + 10.783376497889787, + 55.12421295678142 + ], + [ + 10.758799674638372, + 55.10529205499124 + ], + [ + 10.749359570941726, + 55.093085027352714 + ], + [ + 10.742686393808503, + 55.08169179687228 + ], + [ + 10.733653190783334, + 55.07184479159011 + ], + [ + 10.717784050139826, + 55.064276435556344 + ], + [ + 10.705577018990457, + 55.06256744989268 + ], + [ + 10.66627037913914, + 55.064276435556344 + ] + ] + ] + ] + }, + "type": "Feature", + "properties": { + "source": "https://simplemaps.com", + "id": "DK83", + "name": "Syddanmark" + }, + "id": 1 + }, + { + "geometry": { + "type": 
"MultiPolygon", + "coordinates": [ + [ + [ + [ + 9.675520593530397, + 55.70859818519244 + ], + [ + 9.675564814682227, + 55.70871880301719 + ], + [ + 9.679698927323647, + 55.71964835017899 + ], + [ + 9.680629104828782, + 55.72848503775071 + ], + [ + 9.675513138155502, + 55.73706332971859 + ], + [ + 9.663989289291997, + 55.7429543765599 + ], + [ + 9.641665073962033, + 55.74693350975045 + ], + [ + 9.608747184175755, + 55.74946561617856 + ], + [ + 9.597895136632635, + 55.7533930762072 + ], + [ + 9.596344843368806, + 55.76083447556116 + ], + [ + 9.598360225006857, + 55.76690646748059 + ], + [ + 9.602546012833916, + 55.77266835042036 + ], + [ + 9.605491573785457, + 55.77886956315117 + ], + [ + 9.600685661506827, + 55.78452809944401 + ], + [ + 9.581617059133832, + 55.78682771060045 + ], + [ + 9.546218706457406, + 55.78450230435875 + ], + [ + 9.530405715603825, + 55.78755118193151 + ], + [ + 9.516453079968407, + 55.797576426697766 + ], + [ + 9.491131627334669, + 55.82170931107715 + ], + [ + 9.468807407800863, + 55.83685050143775 + ], + [ + 9.462916296440518, + 55.84610060767762 + ], + [ + 9.459660681670151, + 55.853309463079334 + ], + [ + 9.458265416105375, + 55.85845124894569 + ], + [ + 9.450668982651276, + 55.865789329469074 + ], + [ + 9.399922725675944, + 55.90165276715717 + ], + [ + 9.374756301344647, + 55.91436514245439 + ], + [ + 9.343130320788484, + 55.92547553445804 + ], + [ + 9.249389277637226, + 55.94299390050005 + ], + [ + 9.211665482605554, + 55.93834303653782 + ], + [ + 9.184380325599165, + 55.907104616236445 + ], + [ + 9.137974890102475, + 55.873902523732745 + ], + [ + 9.12107669118206, + 55.86604763999335 + ], + [ + 9.10288658789021, + 55.86320544626469 + ], + [ + 9.093894893636694, + 55.859329773001974 + ], + [ + 9.085058218700397, + 55.853541949899 + ], + [ + 9.079787223967422, + 55.8482709664429 + ], + [ + 9.070950553214702, + 55.835713584575274 + ], + [ + 9.058083123524849, + 55.836617956922545 + ], + [ + 9.047334428007561, + 55.830907732000924 + ], + [ 
+ 9.013072949743426, + 55.8287889521982 + ], + [ + 8.999275340451677, + 55.834731729562215 + ], + [ + 8.959381140724814, + 55.86400649390912 + ], + [ + 8.929098749185627, + 55.88000033168568 + ], + [ + 8.880057814373021, + 55.882584143794276 + ], + [ + 8.843470898212058, + 55.87230050114686 + ], + [ + 8.828278026543742, + 55.84284492031668 + ], + [ + 8.817684358139942, + 55.833749880058846 + ], + [ + 8.82295535353463, + 55.81680000457975 + ], + [ + 8.823110387699552, + 55.81034049952672 + ], + [ + 8.80698733725518, + 55.80455269904055 + ], + [ + 8.774586213717082, + 55.7988166042023 + ], + [ + 8.757481315770384, + 55.80178801830286 + ], + [ + 8.687201370536842, + 55.82995170572608 + ], + [ + 8.654593538502695, + 55.83478341441331 + ], + [ + 8.64405154841968, + 55.833904933787856 + ], + [ + 8.62761844153198, + 55.82979665549971 + ], + [ + 8.610875277338248, + 55.827936303139445 + ], + [ + 8.598886348943562, + 55.829202417393134 + ], + [ + 8.588499385666147, + 55.831476173896185 + ], + [ + 8.578732535573415, + 55.83524849887616 + ], + [ + 8.569585809065101, + 55.83540355393319 + ], + [ + 8.560749141086577, + 55.83411167230695 + ], + [ + 8.55361779392601, + 55.831527857785694 + ], + [ + 8.548501822773193, + 55.82868558605206 + ], + [ + 8.54912194349245, + 55.82545586408289 + ], + [ + 8.554237906052201, + 55.81912543998764 + ], + [ + 8.555943231512725, + 55.81607657225882 + ], + [ + 8.555478140129413, + 55.81349270440006 + ], + [ + 8.553307732933744, + 55.811296498605216 + ], + [ + 8.503026561730286, + 55.7792829105895 + ], + [ + 8.490159131276158, + 55.77147978437919 + ], + [ + 8.471969027931252, + 55.76941274952962 + ], + [ + 8.459721712590886, + 55.77106637546227 + ], + [ + 8.42447838826987, + 55.78375297076611 + ], + [ + 8.39920861463416, + 55.7901091909284 + ], + [ + 8.368306106083875, + 55.80253734910015 + ], + [ + 8.175811394924775, + 55.80147796784839 + ], + [ + 8.17560707357738, + 55.80147645034364 + ], + [ + 8.135590041469221, + 55.96600985405177 + ], + [ + 
8.13070722865471, + 55.97614169271765 + ], + [ + 8.12777753920905, + 55.97821685265813 + ], + [ + 8.126800976441542, + 55.98297759799166 + ], + [ + 8.127777541827136, + 55.98761630419096 + ], + [ + 8.130707225214998, + 55.989732138421765 + ], + [ + 8.136729364638711, + 55.98859286446319 + ], + [ + 8.139414911500449, + 55.98602938547778 + ], + [ + 8.141123892420685, + 55.98346586439417 + ], + [ + 8.14405358106405, + 55.982326551607095 + ], + [ + 8.150564000043884, + 55.977036880224276 + ], + [ + 8.175466343170756, + 55.90033602172184 + ], + [ + 8.193369988373481, + 55.87384675232547 + ], + [ + 8.195974154880979, + 55.862779037301955 + ], + [ + 8.193614129273355, + 55.853216867899455 + ], + [ + 8.183848503707283, + 55.83950429481554 + ], + [ + 8.181651237176492, + 55.828355198223186 + ], + [ + 8.18506920778882, + 55.814642656277755 + ], + [ + 8.193614127332868, + 55.81183500418458 + ], + [ + 8.238536001515204, + 55.82607652971108 + ], + [ + 8.286306187323277, + 55.84747957767322 + ], + [ + 8.308848506627879, + 55.85199619797794 + ], + [ + 8.347829621677539, + 55.86933012648509 + ], + [ + 8.38770592589628, + 55.89288972847932 + ], + [ + 8.39332116068166, + 55.909165766480484 + ], + [ + 8.38689212581569, + 55.926499781176844 + ], + [ + 8.372894726673353, + 55.943426820448494 + ], + [ + 8.356944207345775, + 55.9586856188232 + ], + [ + 8.319509312533606, + 55.98602938547778 + ], + [ + 8.319509310435356, + 56.01703521509781 + ], + [ + 8.308116080454482, + 56.05292387336136 + ], + [ + 8.28239993859334, + 56.07587315039778 + ], + [ + 8.247731967723352, + 56.08982983019081 + ], + [ + 8.17058353044788, + 56.10956452636605 + ], + [ + 8.149668817704294, + 56.1111514582867 + ], + [ + 8.14063561281474, + 56.10272858035521 + ], + [ + 8.141612174763925, + 56.06000396962665 + ], + [ + 8.139903189371493, + 56.039740278720366 + ], + [ + 8.133799674448005, + 56.01703521509781 + ], + [ + 8.137461782535219, + 56.00039293572009 + ], + [ + 8.134776240110654, + 55.993963964223944 + ], + [ + 
8.120127799871216, + 55.996568099151304 + ], + [ + 8.115977409379386, + 56.00185780609272 + ], + [ + 8.11166425995508, + 56.01227449470862 + ], + [ + 8.107920767400937, + 56.02431872904197 + ], + [ + 8.103363476194632, + 56.09686919645187 + ], + [ + 8.106455927004376, + 56.119452244709215 + ], + [ + 8.130625844773292, + 56.181952184249475 + ], + [ + 8.133799676059224, + 56.20547111108664 + ], + [ + 8.122406450734253, + 56.551662568936344 + ], + [ + 8.12663822089232, + 56.568426879980585 + ], + [ + 8.133799679348174, + 56.58563880033836 + ], + [ + 8.17872155340174, + 56.6720645676193 + ], + [ + 8.198985226836095, + 56.6988793664857 + ], + [ + 8.213877804427952, + 56.711818813657985 + ], + [ + 8.219981318678856, + 56.70917393227664 + ], + [ + 8.223968949118222, + 56.70042556637944 + ], + [ + 8.233164912618985, + 56.695461366397815 + ], + [ + 8.23243248375862, + 56.68838119303595 + ], + [ + 8.209971544562185, + 56.65595116313434 + ], + [ + 8.202159047595972, + 56.64704007242005 + ], + [ + 8.2021590500083, + 56.640204169114746 + ], + [ + 8.208343948979776, + 56.63678624159911 + ], + [ + 8.21509849872238, + 56.63495507137631 + ], + [ + 8.233164915441412, + 56.63397865874161 + ], + [ + 8.236582880130427, + 56.62970613772931 + ], + [ + 8.240489133298649, + 56.62006262680262 + ], + [ + 8.245778843101515, + 56.610419027317825 + ], + [ + 8.253916862920942, + 56.60610585990039 + ], + [ + 8.29493248620512, + 56.60199613618065 + ], + [ + 8.305918821033613, + 56.59780514951501 + ], + [ + 8.291514519671043, + 56.593003657283404 + ], + [ + 8.295176630571296, + 56.58380770573496 + ], + [ + 8.292491083107722, + 56.56769441833343 + ], + [ + 8.298350455167522, + 56.55829496364317 + ], + [ + 8.30738366576653, + 56.55434812871056 + ], + [ + 8.315196163519667, + 56.55687088360297 + ], + [ + 8.318044469189315, + 56.56362542321678 + ], + [ + 8.31202232798222, + 56.57196680851626 + ], + [ + 8.312022333747704, + 56.578802826129255 + ], + [ + 8.343516469891366, + 56.584540076999275 + ], + [ + 
8.3921004514014, + 56.58527245144984 + ], + [ + 8.436534048243395, + 56.58022693110437 + ], + [ + 8.455902534734788, + 56.568548823521304 + ], + [ + 8.465179884770581, + 56.56688061144914 + ], + [ + 8.51465905367859, + 56.54466385362288 + ], + [ + 8.528493687631835, + 56.54816317497519 + ], + [ + 8.541351758003476, + 56.55683013160224 + ], + [ + 8.552093949745739, + 56.56801997427812 + ], + [ + 8.559580926799763, + 56.578802826129255 + ], + [ + 8.565765822801042, + 56.578802826129255 + ], + [ + 8.596039261955584, + 56.53180577050534 + ], + [ + 8.596853059489398, + 56.524155971805776 + ], + [ + 8.593028193341276, + 56.51992425231421 + ], + [ + 8.594574417555165, + 56.51048415117733 + ], + [ + 8.601084833490955, + 56.50112541293175 + ], + [ + 8.617442250926125, + 56.49380113297416 + ], + [ + 8.636973500229224, + 56.47972234776898 + ], + [ + 8.648448107247043, + 56.475734691458456 + ], + [ + 8.665537951948185, + 56.47589746091975 + ], + [ + 8.726573114561422, + 56.48322176758303 + ], + [ + 8.73316490479291, + 56.48505273340931 + ], + [ + 8.73853600676175, + 56.49005772035075 + ], + [ + 8.742035355241322, + 56.497748156637826 + ], + [ + 8.743418814501615, + 56.5070661282958 + ], + [ + 8.743662962575444, + 56.52167396828435 + ], + [ + 8.744965041888491, + 56.53156161581221 + ], + [ + 8.750824417449953, + 56.552069435150976 + ], + [ + 8.755137568315167, + 56.55780673836247 + ], + [ + 8.760427284411445, + 56.56012610181757 + ], + [ + 8.76254316274512, + 56.563381222455426 + ], + [ + 8.757578967766854, + 56.57196680851626 + ], + [ + 8.726573117665046, + 56.58563880033836 + ], + [ + 8.716563346613768, + 56.58616770491466 + ], + [ + 8.70899499330235, + 56.588568496853576 + ], + [ + 8.704437690509964, + 56.59369530764334 + ], + [ + 8.702972851165326, + 56.60236236099911 + ], + [ + 8.698496946254393, + 56.6112328842656 + ], + [ + 8.68148847584155, + 56.61627837169969 + ], + [ + 8.681895374489326, + 56.62653223444577 + ], + [ + 8.695160357432876, + 56.633246232606275 + ], + [ + 
8.740896024384549, + 56.63495507137631 + ], + [ + 8.80543053780102, + 56.695461366397815 + ], + [ + 8.82048586887807, + 56.70038477953351 + ], + [ + 8.861582884263688, + 56.7074649789459 + ], + [ + 8.916758656161551, + 56.7088076347582 + ], + [ + 8.918711785010679, + 56.71100495013594 + ], + [ + 8.894786002955312, + 56.71596912956676 + ], + [ + 8.851084837591616, + 56.71678301724762 + ], + [ + 8.84083092457538, + 56.72235748350683 + ], + [ + 8.850108273028225, + 56.74017980702219 + ], + [ + 8.864756707674243, + 56.7558454536888 + ], + [ + 8.876638219351777, + 56.76430901626274 + ], + [ + 8.908457873771225, + 56.777411132976084 + ], + [ + 8.955739786007904, + 56.80296466155356 + ], + [ + 8.974294470882054, + 56.80536535136014 + ], + [ + 8.985199421170517, + 56.804144676365254 + ], + [ + 9.0036727217887, + 56.79905833232055 + ], + [ + 9.014903196652808, + 56.797837703564795 + ], + [ + 9.02662193485363, + 56.79962791928897 + ], + [ + 9.032725452567554, + 56.80377833100231 + ], + [ + 9.037608267899401, + 56.808539116103795 + ], + [ + 9.045583532032277, + 56.81220126463207 + ], + [ + 9.063161656417934, + 56.812567467848716 + ], + [ + 9.086110868116132, + 56.80841699766937 + ], + [ + 9.1059676383431, + 56.80011620710157 + ], + [ + 9.114512561906691, + 56.787990575730376 + ], + [ + 9.120453322073976, + 56.76459383044985 + ], + [ + 9.134450717557533, + 56.75071849897378 + ], + [ + 9.150564001374276, + 56.74091221914551 + ], + [ + 9.162852406713741, + 56.72964097699567 + ], + [ + 9.1760360039933, + 56.7085635439155 + ], + [ + 9.148203978170283, + 56.70473875115853 + ], + [ + 9.118825722949117, + 56.678778461681915 + ], + [ + 9.093435088713615, + 56.6749941666245 + ], + [ + 9.093435086383348, + 56.66754784827368 + ], + [ + 9.097504105725617, + 56.658433381902476 + ], + [ + 9.086680532204243, + 56.649603547752214 + ], + [ + 9.05933679098743, + 56.63397865874161 + ], + [ + 9.049978067127956, + 56.61981850462772 + ], + [ + 9.048838737638105, + 56.60716379941246 + ], + [ + 
9.052989131355975, + 56.5753848628679 + ], + [ + 9.060557486289383, + 56.56574125632476 + ], + [ + 9.076996288499137, + 56.567368863017855 + ], + [ + 9.093435087195209, + 56.57807031935969 + ], + [ + 9.106781449770896, + 56.61078527048138 + ], + [ + 9.1215926449409, + 56.613023190826624 + ], + [ + 9.13965905123838, + 56.611029379536134 + ], + [ + 9.15544681268776, + 56.61294182013723 + ], + [ + 9.149180539434546, + 56.62250402856251 + ], + [ + 9.14112389078222, + 56.62604397164926 + ], + [ + 9.131521030945358, + 56.62482332086677 + ], + [ + 9.121348500674664, + 56.620347356204164 + ], + [ + 9.128916862668182, + 56.63125234183311 + ], + [ + 9.13672935922895, + 56.63906479870432 + ], + [ + 9.143728062853125, + 56.64769117121754 + ], + [ + 9.148610869280976, + 56.661322286443976 + ], + [ + 9.15544680727819, + 56.661322286443976 + ], + [ + 9.149424677058171, + 56.63886142077581 + ], + [ + 9.175547716642097, + 56.63544336008088 + ], + [ + 9.211192256072056, + 56.63886142077581 + ], + [ + 9.233653188641572, + 56.637152381729756 + ], + [ + 9.247243691767405, + 56.63202552947973 + ], + [ + 9.262461791008482, + 56.63373451634153 + ], + [ + 9.276215039257542, + 56.632961320826276 + ], + [ + 9.28532961961485, + 56.620347356204164 + ], + [ + 9.282399942377218, + 56.610541161781065 + ], + [ + 9.25945071226655, + 56.5919456463518 + ], + [ + 9.250336135944972, + 56.578802826129255 + ], + [ + 9.261241083752829, + 56.576361416721014 + ], + [ + 9.26335696252233, + 56.57196680851626 + ], + [ + 9.264008014935795, + 56.56590411255629 + ], + [ + 9.270843943952746, + 56.55829496364317 + ], + [ + 9.280609571315143, + 56.5528018288988 + ], + [ + 9.305674679020324, + 56.54466385362288 + ], + [ + 9.309743687854509, + 56.54051343688258 + ], + [ + 9.312185087346867, + 56.535101572590754 + ], + [ + 9.316416861771847, + 56.530829153851734 + ], + [ + 9.326182484805742, + 56.530340847643835 + ], + [ + 9.340586786264598, + 56.547064535549005 + ], + [ + 9.346690302616796, + 56.552069435150976 + ], + 
[ + 9.360850457597001, + 56.54474519132456 + ], + [ + 9.373057491453492, + 56.552720487311284 + ], + [ + 9.372731970605795, + 56.56537511116512 + ], + [ + 9.350352405480313, + 56.57196680851626 + ], + [ + 9.319346545381228, + 56.56171290109638 + ], + [ + 9.31739342937094, + 56.55866125386369 + ], + [ + 9.312836131415686, + 56.55776598613549 + ], + [ + 9.30225670494582, + 56.55829496364317 + ], + [ + 9.293711784646893, + 56.56102122163668 + ], + [ + 9.293142126405458, + 56.56679926306699 + ], + [ + 9.296153190743057, + 56.571600649826415 + ], + [ + 9.298838733505216, + 56.57196680851626 + ], + [ + 9.30062910457032, + 56.60911696069075 + ], + [ + 9.317067908508559, + 56.641099394148235 + ], + [ + 9.326019727647243, + 56.66941967194571 + ], + [ + 9.397700635418184, + 56.64982411537245 + ], + [ + 9.435424432982122, + 56.63850699479308 + ], + [ + 9.459557321721157, + 56.661192890710765 + ], + [ + 9.450720657778541, + 56.67522311645477 + ], + [ + 9.458782181578247, + 56.68511915375927 + ], + [ + 9.487669308539491, + 56.68382726504193 + ], + [ + 9.502500439101365, + 56.6747321430792 + ], + [ + 9.49629926598918, + 56.65088345400259 + ], + [ + 9.524049518039673, + 56.63370112937423 + ], + [ + 9.637737657719962, + 56.633003375379715 + ], + [ + 9.657323037027822, + 56.62246149512716 + ], + [ + 9.631278109720947, + 56.60579579757894 + ], + [ + 9.6240950904214, + 56.583962556098186 + ], + [ + 9.643267044123556, + 56.58044853094458 + ], + [ + 9.675926551527683, + 56.59042210945879 + ], + [ + 9.725432571655704, + 56.55908033064203 + ], + [ + 9.769305861512892, + 56.565643221762734 + ], + [ + 9.796125929180937, + 56.55740082095066 + ], + [ + 9.813024118136479, + 56.55525621344174 + ], + [ + 9.854623654962744, + 56.558925342707504 + ], + [ + 9.887903272277963, + 56.580655207959175 + ], + [ + 9.927694124798018, + 56.59352263550065 + ], + [ + 9.952033720682651, + 56.597682542861435 + ], + [ + 10.011616651631176, + 56.60153249863015 + ], + [ + 10.080139604291302, + 56.62514866636879 + 
], + [ + 10.11347090760862, + 56.65129698155062 + ], + [ + 10.159979690639924, + 56.672897668630384 + ], + [ + 10.193207225917762, + 56.69358957284981 + ], + [ + 10.24195396672329, + 56.68838119303595 + ], + [ + 10.283539252222727, + 56.689764640577515 + ], + [ + 10.310313353081414, + 56.6988793664857 + ], + [ + 10.334239130379848, + 56.70384350727417 + ], + [ + 10.352793821325553, + 56.681830203880615 + ], + [ + 10.357676625439932, + 56.648749051623284 + ], + [ + 10.340342640230258, + 56.620347356204164 + ], + [ + 10.272959831999422, + 56.59170156499268 + ], + [ + 10.223317905785061, + 56.560614333816446 + ], + [ + 10.214366079744398, + 56.55829496364317 + ], + [ + 10.207286009551435, + 56.52578365849927 + ], + [ + 10.20386803460589, + 56.516669007559564 + ], + [ + 10.211436387802069, + 56.50531640321249 + ], + [ + 10.20679771512625, + 56.490952789539634 + ], + [ + 10.195485876750483, + 56.47768793532434 + ], + [ + 10.183360223469702, + 56.469549888608064 + ], + [ + 10.214121937067972, + 56.470445009698246 + ], + [ + 10.225596543197911, + 56.475734691458456 + ], + [ + 10.224375843213199, + 56.5136172124239 + ], + [ + 10.227549677979344, + 56.54108310846218 + ], + [ + 10.237640819277013, + 56.554388719672744 + ], + [ + 10.279551624031155, + 56.57196680851626 + ], + [ + 10.312673379960419, + 56.596625144422596 + ], + [ + 10.33033288604101, + 56.60321694217178 + ], + [ + 10.350922077818426, + 56.59589272056415 + ], + [ + 10.354502797988411, + 56.589544966709816 + ], + [ + 10.359873895737316, + 56.568101323249664 + ], + [ + 10.360850451865026, + 56.56171290109638 + ], + [ + 10.365244988116974, + 56.55841706029491 + ], + [ + 10.394867388490852, + 56.54466385362288 + ], + [ + 10.469899938085563, + 56.52090080589351 + ], + [ + 10.551524288002149, + 56.51557041270423 + ], + [ + 10.7890731072545, + 56.53636296181861 + ], + [ + 10.822276241715347, + 56.53424717644245 + ], + [ + 10.854991080092631, + 56.524155971805776 + ], + [ + 10.869883666428247, + 56.514715954978314 + ], 
+ [ + 10.902110223950665, + 56.48322176758303 + ], + [ + 10.964121942817739, + 56.448431727088696 + ], + [ + 10.964121936444675, + 56.44220607415228 + ], + [ + 10.94092858163071, + 56.4295108051109 + ], + [ + 10.928477417391218, + 56.40648028795688 + ], + [ + 10.923513211340763, + 56.37986882224197 + ], + [ + 10.922618033382731, + 56.35626860491031 + ], + [ + 10.911631704165607, + 56.3340517950024 + ], + [ + 10.819590692574854, + 56.260931724839246 + ], + [ + 10.772959830036868, + 56.24298735090657 + ], + [ + 10.755381709754587, + 56.22931552665423 + ], + [ + 10.750010614168248, + 56.216050535586675 + ], + [ + 10.752614781991603, + 56.1849633044705 + ], + [ + 10.738617382364483, + 56.15452706733051 + ], + [ + 10.707041861396508, + 56.15086495862358 + ], + [ + 10.672211132208702, + 56.165187873532496 + ], + [ + 10.648936396170745, + 56.188381275627066 + ], + [ + 10.674652538819684, + 56.190822645183665 + ], + [ + 10.68523196885155, + 56.19623446208058 + ], + [ + 10.69044030232542, + 56.208889090199065 + ], + [ + 10.687673374160049, + 56.222235431569636 + ], + [ + 10.677582226989701, + 56.22630442888756 + ], + [ + 10.664561395217719, + 56.22760652522093 + ], + [ + 10.65259850718061, + 56.232733500568024 + ], + [ + 10.628591342609603, + 56.23753490463916 + ], + [ + 10.603770379290923, + 56.221014719176665 + ], + [ + 10.581228063378923, + 56.199204846051884 + ], + [ + 10.54916425926488, + 56.17820872891046 + ], + [ + 10.556162953399571, + 56.15493394643114 + ], + [ + 10.569346551712718, + 56.12946199981034 + ], + [ + 10.57374108462783, + 56.11261630686188 + ], + [ + 10.563975457235633, + 56.10561758259007 + ], + [ + 10.548024935703118, + 56.101263735729816 + ], + [ + 10.53101647058773, + 56.10016509041951 + ], + [ + 10.518565299760608, + 56.10272858035521 + ], + [ + 10.508474154770525, + 56.11102936146334 + ], + [ + 10.498545770566647, + 56.13019442032174 + ], + [ + 10.491221547884221, + 56.13996000459415 + ], + [ + 10.49919681404505, + 56.144720803654444 + ], + [ + 
10.516123893287398, + 56.15159739013722 + ], + [ + 10.525401239604937, + 56.1536319206892 + ], + [ + 10.51823977802126, + 56.16478097018065 + ], + [ + 10.509450714583782, + 56.17011136833807 + ], + [ + 10.46810957178726, + 56.17706941171897 + ], + [ + 10.458994986631973, + 56.17609282583332 + ], + [ + 10.436696814314514, + 56.167914166830116 + ], + [ + 10.408702018531724, + 56.1648623667812 + ], + [ + 10.39144941382995, + 56.172674858942784 + ], + [ + 10.376231317212914, + 56.18667228154888 + ], + [ + 10.354665560373139, + 56.20205311998487 + ], + [ + 10.376963738445669, + 56.20331452497532 + ], + [ + 10.410411001249493, + 56.21849189839832 + ], + [ + 10.429209830565123, + 56.22247954306578 + ], + [ + 10.443369986057245, + 56.21662016245215 + ], + [ + 10.460703973550354, + 56.20620353517001 + ], + [ + 10.477793815274604, + 56.20233794997725 + ], + [ + 10.491221547670495, + 56.21629463828026 + ], + [ + 10.474294465611797, + 56.23224516865464 + ], + [ + 10.475433789057949, + 56.2446963455906 + ], + [ + 10.50473066378589, + 56.27777739763327 + ], + [ + 10.491221550133046, + 56.27765534246566 + ], + [ + 10.476817256555302, + 56.280910577202874 + ], + [ + 10.463633658741013, + 56.286851290087824 + ], + [ + 10.45362389553505, + 56.29450106097364 + ], + [ + 10.442637562441844, + 56.29865139461579 + ], + [ + 10.416758662202446, + 56.29189690144159 + ], + [ + 10.403005401553143, + 56.29824449470244 + ], + [ + 10.399261913045468, + 56.28485746726773 + ], + [ + 10.39380943982506, + 56.27651600492583 + ], + [ + 10.385020376505732, + 56.272162149219426 + ], + [ + 10.371104363750087, + 56.270941481364545 + ], + [ + 10.363780144896786, + 56.265570390009024 + ], + [ + 10.33692467808637, + 56.232733500568024 + ], + [ + 10.312510610110426, + 56.215155308543174 + ], + [ + 10.258474157378325, + 56.190253023870966 + ], + [ + 10.234222853864008, + 56.171332119119796 + ], + [ + 10.220225458008352, + 56.14789460944701 + ], + [ + 10.22689863443663, + 56.133246165955015 + ], + [ + 
10.243418814366441, + 56.11957426748165 + ], + [ + 10.259043818326077, + 56.09902580934033 + ], + [ + 10.2656356101426, + 56.07371650664059 + ], + [ + 10.261485221238654, + 56.05442942036943 + ], + [ + 10.249196810122218, + 56.036932674033764 + ], + [ + 10.2311304043056, + 56.01703521509781 + ], + [ + 10.255137562685965, + 56.02362698238252 + ], + [ + 10.272715692250298, + 56.02484773716607 + ], + [ + 10.277517122476887, + 56.01776763907378 + ], + [ + 10.252452016499527, + 55.9862327865821 + ], + [ + 10.250010616155409, + 55.973456157879525 + ], + [ + 10.251638217451957, + 55.948187572141286 + ], + [ + 10.250743034523483, + 55.93073150657497 + ], + [ + 10.249278192880714, + 55.92304110441422 + ], + [ + 10.245371942682018, + 55.914618250163215 + ], + [ + 10.221364782105091, + 55.89691806005135 + ], + [ + 10.217539911543009, + 55.88983797563868 + ], + [ + 10.21265709711756, + 55.88987864934358 + ], + [ + 10.20183352784075, + 55.88589099327467 + ], + [ + 10.190196160125884, + 55.879706123442425 + ], + [ + 10.183360220740383, + 55.873724662544774 + ], + [ + 10.188324414683688, + 55.87116119637112 + ], + [ + 10.195567256022578, + 55.86131421818802 + ], + [ + 10.199392125101404, + 55.85073479208969 + ], + [ + 10.19060306115059, + 55.843491930727666 + ], + [ + 10.187347850030916, + 55.8386090943921 + ], + [ + 10.182139522466624, + 55.83368565382492 + ], + [ + 10.17269941221861, + 55.83148831601413 + ], + [ + 10.160411002783954, + 55.83331938899874 + ], + [ + 10.151052279933396, + 55.838120834232086 + ], + [ + 10.135508663205592, + 55.85199619797794 + ], + [ + 10.131032746498333, + 55.85846586967185 + ], + [ + 10.128103059151174, + 55.86587146466397 + ], + [ + 10.123789907315587, + 55.873277054013585 + ], + [ + 10.11451256658686, + 55.87986888877809 + ], + [ + 10.101817252446311, + 55.88239165103053 + ], + [ + 10.004649285580486, + 55.87986888877809 + ], + [ + 9.991709830895351, + 55.87734608308122 + ], + [ + 9.983653193685472, + 55.87140537243335 + ], + [ + 
9.977386912242672, + 55.86456937069787 + ], + [ + 9.970469594876024, + 55.85944242509661 + ], + [ + 9.944509312659145, + 55.85358309450582 + ], + [ + 9.891774933764882, + 55.85537341358272 + ], + [ + 9.867442257120807, + 55.85199619797794 + ], + [ + 9.892588741269153, + 55.836249129653034 + ], + [ + 10.035655145253743, + 55.81842683961854 + ], + [ + 10.045746292487912, + 55.813950942984214 + ], + [ + 10.042165559380752, + 55.80341215812287 + ], + [ + 10.025645376136858, + 55.7830264005488 + ], + [ + 10.017344596426073, + 55.76557037328519 + ], + [ + 10.019786005212314, + 55.75787996418022 + ], + [ + 10.03370201774753, + 55.7560488803678 + ], + [ + 10.059825069219633, + 55.75641514351241 + ], + [ + 10.059825066405011, + 55.749497793718824 + ], + [ + 10.018809438913099, + 55.73627348057128 + ], + [ + 10.006846550727818, + 55.72504304453098 + ], + [ + 10.018239779702247, + 55.708563540511534 + ], + [ + 10.01449628875622, + 55.70685454442307 + ], + [ + 10.00521894536491, + 55.70111724754837 + ], + [ + 9.981700063351516, + 55.70941800486073 + ], + [ + 9.871836781868152, + 55.69122952175045 + ], + [ + 9.821787958045313, + 55.67597078249979 + ], + [ + 9.792246939769703, + 55.67446522124108 + ], + [ + 9.730967646163029, + 55.688137137099474 + ], + [ + 9.695648634080827, + 55.70579661797767 + ], + [ + 9.686534049712154, + 55.708563540511534 + ], + [ + 9.675520593530397, + 55.70859818519244 + ] + ] + ], + [ + [ + [ + 10.559418164909685, + 55.86627838599967 + ], + [ + 10.573090039809683, + 55.87885162789616 + ], + [ + 10.58106530288328, + 55.894517351824156 + ], + [ + 10.580251495725154, + 55.91152575789926 + ], + [ + 10.566905146115145, + 55.9282901293565 + ], + [ + 10.548594598982612, + 55.93573641692674 + ], + [ + 10.528330921993893, + 55.94061917469174 + ], + [ + 10.515391470489508, + 55.951239308247004 + ], + [ + 10.51856530214067, + 55.97614169271765 + ], + [ + 10.528819203878422, + 55.9892031574574 + ], + [ + 10.543955923481946, + 55.99799223822899 + ], + [ + 
10.556813997752535, + 55.99579498626887 + ], + [ + 10.564219599734619, + 55.953843522185075 + ], + [ + 10.580251497587632, + 55.936509502452424 + ], + [ + 10.602793813101835, + 55.92523831610057 + ], + [ + 10.628265823142186, + 55.92084384052331 + ], + [ + 10.619395376667798, + 55.910956096360565 + ], + [ + 10.615407749520154, + 55.90033602172184 + ], + [ + 10.616384314085106, + 55.8897973019432 + ], + [ + 10.621429884616246, + 55.87986888877809 + ], + [ + 10.63868248857638, + 55.86611563333127 + ], + [ + 10.650645377196785, + 55.8701032372131 + ], + [ + 10.659190299624619, + 55.880764060884594 + ], + [ + 10.66627038176776, + 55.88670485031125 + ], + [ + 10.669281447028816, + 55.88003165426549 + ], + [ + 10.655039910296452, + 55.86538320825103 + ], + [ + 10.628265822036479, + 55.84577058640335 + ], + [ + 10.62330162907934, + 55.82770416987296 + ], + [ + 10.624359568315947, + 55.79433825341711 + ], + [ + 10.614593948937907, + 55.77684157142379 + ], + [ + 10.58220462440506, + 55.75975171056095 + ], + [ + 10.547129754171236, + 55.76520417090167 + ], + [ + 10.520274288252132, + 55.78856032520235 + ], + [ + 10.511566604981441, + 55.82526279518501 + ], + [ + 10.517263218093392, + 55.84552644615681 + ], + [ + 10.527679882257631, + 55.85390857664946 + ], + [ + 10.54216556208591, + 55.85814037904115 + ], + [ + 10.559418164909685, + 55.86627838599967 + ] + ] + ], + [ + [ + [ + 11.512217647662029, + 56.695461366397815 + ], + [ + 11.509532097396018, + 56.710150461934816 + ], + [ + 11.520843948609365, + 56.71539950690267 + ], + [ + 11.606130397217369, + 56.72040428731851 + ], + [ + 11.649424670821913, + 56.72964097699567 + ], + [ + 11.649424671478307, + 56.722154004430884 + ], + [ + 11.645192909043846, + 56.72199131770778 + ], + [ + 11.64193770119654, + 56.720648556042256 + ], + [ + 11.635752798633375, + 56.71596912956676 + ], + [ + 11.618011914182572, + 56.711330462951814 + ], + [ + 11.604258662022403, + 56.70115796493851 + ], + [ + 11.592539905192151, + 56.68984604853613 + ], 
+ [ + 11.581065296019627, + 56.68121975465745 + ], + [ + 11.564219592051636, + 56.6774355650139 + ], + [ + 11.543711781021791, + 56.67865627754905 + ], + [ + 11.524587440795575, + 56.684719190626815 + ], + [ + 11.512217647662029, + 56.695461366397815 + ] + ] + ] + ] + }, + "type": "Feature", + "properties": { + "source": "https://simplemaps.com", + "id": "DK82", + "name": "Midtjylland" + }, + "id": 2 + }, + { + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 10.193207225917762, + 56.69358957284981 + ], + [ + 10.159979690639924, + 56.672897668630384 + ], + [ + 10.11347090760862, + 56.65129698155062 + ], + [ + 10.080139604291302, + 56.62514866636879 + ], + [ + 10.011616651631176, + 56.60153249863015 + ], + [ + 9.952033720682651, + 56.597682542861435 + ], + [ + 9.927694124798018, + 56.59352263550065 + ], + [ + 9.887903272277963, + 56.580655207959175 + ], + [ + 9.854623654962744, + 56.558925342707504 + ], + [ + 9.813024118136479, + 56.55525621344174 + ], + [ + 9.796125929180937, + 56.55740082095066 + ], + [ + 9.769305861512892, + 56.565643221762734 + ], + [ + 9.725432571655704, + 56.55908033064203 + ], + [ + 9.675926551527683, + 56.59042210945879 + ], + [ + 9.643267044123556, + 56.58044853094458 + ], + [ + 9.6240950904214, + 56.583962556098186 + ], + [ + 9.631278109720947, + 56.60579579757894 + ], + [ + 9.657323037027822, + 56.62246149512716 + ], + [ + 9.637737657719962, + 56.633003375379715 + ], + [ + 9.524049518039673, + 56.63370112937423 + ], + [ + 9.49629926598918, + 56.65088345400259 + ], + [ + 9.502500439101365, + 56.6747321430792 + ], + [ + 9.487669308539491, + 56.68382726504193 + ], + [ + 9.458782181578247, + 56.68511915375927 + ], + [ + 9.450720657778541, + 56.67522311645477 + ], + [ + 9.459557321721157, + 56.661192890710765 + ], + [ + 9.435424432982122, + 56.63850699479308 + ], + [ + 9.397700635418184, + 56.64982411537245 + ], + [ + 9.326019727647243, + 56.66941967194571 + ], + [ + 9.325938344795059, + 56.669623086927594 + ], + [ + 
9.30567467796015, + 56.695461366397815 + ], + [ + 9.295909052133293, + 56.701564872254046 + ], + [ + 9.290700718240487, + 56.70384350727417 + ], + [ + 9.270843941505182, + 56.70233790253155 + ], + [ + 9.230642123673102, + 56.692084060344044 + ], + [ + 9.220713737140725, + 56.68427154133272 + ], + [ + 9.19906659923478, + 56.676581087586726 + ], + [ + 9.177907742865939, + 56.67584863595898 + ], + [ + 9.169118682428925, + 56.68866603842685 + ], + [ + 9.180349156172545, + 56.70075105861235 + ], + [ + 9.226410355946934, + 56.716254021923106 + ], + [ + 9.236827022822274, + 56.733059033304265 + ], + [ + 9.242523636752741, + 56.74705641603474 + ], + [ + 9.240489123041355, + 56.75397362632163 + ], + [ + 9.215505405379606, + 56.75885651670541 + ], + [ + 9.205251495397771, + 56.76382067270825 + ], + [ + 9.196543822394405, + 56.77045327140939 + ], + [ + 9.189707873606142, + 56.777411132976084 + ], + [ + 9.184418163881741, + 56.784002983098276 + ], + [ + 9.17872155543238, + 56.79413490055297 + ], + [ + 9.174978064499332, + 56.806057076519906 + ], + [ + 9.176036004316222, + 56.81842682893014 + ], + [ + 9.181162951086751, + 56.821722649535694 + ], + [ + 9.204112170121496, + 56.84162012357209 + ], + [ + 9.206716339517602, + 56.84634020526394 + ], + [ + 9.21143639362597, + 56.851507875370906 + ], + [ + 9.210622587097715, + 56.863104498353664 + ], + [ + 9.205902536166544, + 56.874579121565155 + ], + [ + 9.199554885233635, + 56.87982820727238 + ], + [ + 9.187836134100765, + 56.88117096724837 + ], + [ + 9.178965689998103, + 56.884588921568565 + ], + [ + 9.16285241159137, + 56.89411043877302 + ], + [ + 9.181488472052713, + 56.91909408168849 + ], + [ + 9.20020593085657, + 56.93891029539453 + ], + [ + 9.220469592648225, + 56.95506414134463 + ], + [ + 9.251475460080915, + 56.97256097784063 + ], + [ + 9.258474158844786, + 56.97483963218505 + ], + [ + 9.265961134330604, + 56.97622305305322 + ], + [ + 9.274668819351598, + 56.976629991058175 + ], + [ + 9.279958533450221, + 56.979803820239475 
+ ], + [ + 9.285492381911617, + 56.99396390845612 + ], + [ + 9.28874759097369, + 56.99713774945202 + ], + [ + 9.437022326862003, + 57.02440007021194 + ], + [ + 9.470062699583565, + 57.02383051394371 + ], + [ + 9.586761913940776, + 56.99713774945202 + ], + [ + 9.586761914307125, + 56.98969146479006 + ], + [ + 9.573985227151065, + 56.985012172117095 + ], + [ + 9.573090034601545, + 56.97882715192989 + ], + [ + 9.58082116554944, + 56.97296790681679 + ], + [ + 9.59424889449745, + 56.96918366689119 + ], + [ + 9.607920772347711, + 56.9698754325948 + ], + [ + 9.617686399362848, + 56.97504309935661 + ], + [ + 9.654144722178824, + 57.006781260300976 + ], + [ + 9.670664905656531, + 57.02484767202911 + ], + [ + 9.688975452572684, + 57.03912989816185 + ], + [ + 9.728200718637952, + 57.04706453909633 + ], + [ + 9.761241079439936, + 57.058579790260886 + ], + [ + 9.809092639078795, + 57.051743825102506 + ], + [ + 9.915782094399411, + 57.058579790260886 + ], + [ + 9.856618690547672, + 57.08869054927573 + ], + [ + 9.826670765685359, + 57.094875354786474 + ], + [ + 9.80640709108457, + 57.10423404867845 + ], + [ + 9.795664914140982, + 57.1063907398754 + ], + [ + 9.787608269593742, + 57.10394929286969 + ], + [ + 9.752452021552083, + 57.07778557865701 + ], + [ + 9.733246293020736, + 57.066799256182435 + ], + [ + 9.711924672158709, + 57.06126533382857 + ], + [ + 9.693207222978161, + 57.06915919996528 + ], + [ + 9.67514082511577, + 57.07916906020953 + ], + [ + 9.656097851594938, + 57.07636139024477 + ], + [ + 9.621104360476695, + 57.058579790260886 + ], + [ + 9.580821165982902, + 57.04930254682269 + ], + [ + 9.49398847731529, + 57.04857005776567 + ], + [ + 9.265147333138623, + 57.00340405295286 + ], + [ + 9.245371946715599, + 57.00234616619543 + ], + [ + 9.22641035283153, + 57.00702546823144 + ], + [ + 9.114512563609562, + 57.058579790260886 + ], + [ + 9.12086021916716, + 57.045396190770155 + ], + [ + 9.112803585231902, + 57.0386417092328 + ], + [ + 9.07984459343319, + 57.031236025426516 
+ ], + [ + 9.05543053610782, + 57.0211449378836 + ], + [ + 9.042653837056765, + 57.01772689295164 + ], + [ + 9.025726762414081, + 57.01756423793354 + ], + [ + 9.031993038417813, + 57.02383051394371 + ], + [ + 9.014008008506512, + 57.030178120787184 + ], + [ + 8.996755399371759, + 57.02806217101133 + ], + [ + 8.979665556300953, + 57.020493822607065 + ], + [ + 8.963145375288958, + 57.01019923899576 + ], + [ + 8.94695071096266, + 57.011175771395216 + ], + [ + 8.926442906796579, + 57.00014885285384 + ], + [ + 8.909027544838914, + 56.98818600169403 + ], + [ + 8.901621935020133, + 56.986273428690076 + ], + [ + 8.893077024450848, + 56.999660618600494 + ], + [ + 8.873220252796639, + 57.004828253376544 + ], + [ + 8.850433792719473, + 57.00348545388797 + ], + [ + 8.833343945024032, + 56.99713774945202 + ], + [ + 8.847992379948721, + 56.98847071799051 + ], + [ + 8.81169681407025, + 56.9698754325948 + ], + [ + 8.762054878697858, + 56.95547102720816 + ], + [ + 8.726410350806923, + 56.96124907545893 + ], + [ + 8.702972852934105, + 56.958441485199685 + ], + [ + 8.67953535683694, + 56.952541471355794 + ], + [ + 8.668793162554593, + 56.945298537922476 + ], + [ + 8.663340686287057, + 56.9259300181098 + ], + [ + 8.649912956851706, + 56.914129948048014 + ], + [ + 8.63412520108447, + 56.90517819402782 + ], + [ + 8.62110436449729, + 56.89411043877302 + ], + [ + 8.615407753112041, + 56.86562741360491 + ], + [ + 8.614268421594348, + 56.863348654951395 + ], + [ + 8.612559441713975, + 56.85468171744676 + ], + [ + 8.607920764081065, + 56.84967675986452 + ], + [ + 8.601328976094548, + 56.84552648826429 + ], + [ + 8.569590691295334, + 56.81842682893014 + ], + [ + 8.547048372326387, + 56.817328183008854 + ], + [ + 8.536306189280742, + 56.814520618844725 + ], + [ + 8.531748888209458, + 56.80878319255899 + ], + [ + 8.524261913685656, + 56.804754932404556 + ], + [ + 8.507823108976027, + 56.801092787022505 + ], + [ + 8.49138430689679, + 56.79425684784418 + ], + [ + 8.48389733216455, + 
56.78115469221353 + ], + [ + 8.49244224893334, + 56.74725976482086 + ], + [ + 8.491465686409867, + 56.73139063020126 + ], + [ + 8.476410349437536, + 56.722154004430884 + ], + [ + 8.476410351004446, + 56.71596912956676 + ], + [ + 8.516368040334106, + 56.718207169626496 + ], + [ + 8.525563996998654, + 56.71596912956676 + ], + [ + 8.533050982831165, + 56.703802880176575 + ], + [ + 8.527028840141444, + 56.695298545059046 + ], + [ + 8.514008003893059, + 56.69025292835464 + ], + [ + 8.50098717168919, + 56.68866603842685 + ], + [ + 8.489268420516233, + 56.69074121755575 + ], + [ + 8.465830925664426, + 56.70014070097824 + ], + [ + 8.45289146790178, + 56.70233790253155 + ], + [ + 8.440928578055477, + 56.70038477953351 + ], + [ + 8.420909055260468, + 56.69135170148675 + ], + [ + 8.408213734725315, + 56.68866603842685 + ], + [ + 8.408213735110149, + 56.68121975465745 + ], + [ + 8.455902536706724, + 56.68866603842685 + ], + [ + 8.470225457141865, + 56.68610260791589 + ], + [ + 8.49122155539534, + 56.67698809573879 + ], + [ + 8.50098717192777, + 56.6749941666245 + ], + [ + 8.571543819451136, + 56.69196202707372 + ], + [ + 8.586273630655944, + 56.68866603842685 + ], + [ + 8.589040566752278, + 56.681341940660026 + ], + [ + 8.586273631241475, + 56.66229885820681 + ], + [ + 8.586273637724014, + 56.65387609367957 + ], + [ + 8.601328973425783, + 56.64061108899264 + ], + [ + 8.608083535885937, + 56.63104897336726 + ], + [ + 8.593272330201621, + 56.61635974400443 + ], + [ + 8.58301841920802, + 56.60932030261431 + ], + [ + 8.56959069091792, + 56.60610585990039 + ], + [ + 8.555430537642662, + 56.60488519342441 + ], + [ + 8.546641468078725, + 56.60171127737501 + ], + [ + 8.547048371283905, + 56.597479535967835 + ], + [ + 8.559580925692737, + 56.593003657283404 + ], + [ + 8.539317259016187, + 56.592759574389575 + ], + [ + 8.511485223465519, + 56.60700106374487 + ], + [ + 8.486989779194154, + 56.62531158011541 + ], + [ + 8.476410349834978, + 56.637152381729756 + ], + [ + 8.467295774103585, 
+ 56.65204505804329 + ], + [ + 8.444834834974284, + 56.664048610331655 + ], + [ + 8.416758663500735, + 56.6720645676193 + ], + [ + 8.391123890967474, + 56.6749941666245 + ], + [ + 8.373383009160847, + 56.68024323319805 + ], + [ + 8.352224160318617, + 56.692938616869085 + ], + [ + 8.332530138669888, + 56.70840070982771 + ], + [ + 8.319509308484943, + 56.722154004430884 + ], + [ + 8.308360221717031, + 56.75820546710781 + ], + [ + 8.302012570676782, + 56.763739389347506 + ], + [ + 8.288422073665112, + 56.766343528726495 + ], + [ + 8.26026451489687, + 56.77667872127639 + ], + [ + 8.24366295216143, + 56.777411132976084 + ], + [ + 8.258067257372966, + 56.740546012682344 + ], + [ + 8.263194204503616, + 56.71893948545044 + ], + [ + 8.260752802674645, + 56.705389753037885 + ], + [ + 8.245860216546964, + 56.704006176344954 + ], + [ + 8.238291863654226, + 56.72504304507891 + ], + [ + 8.236175980011796, + 56.77118565576224 + ], + [ + 8.24789472457522, + 56.81244534645829 + ], + [ + 8.270355669042594, + 56.84324784472327 + ], + [ + 8.44971764396072, + 57.003973699471075 + ], + [ + 8.503591341254559, + 57.03465402905361 + ], + [ + 8.586273637629715, + 57.1063907398754 + ], + [ + 8.618011910901679, + 57.122870130163264 + ], + [ + 8.65381921235167, + 57.12250404300879 + ], + [ + 8.737152543693501, + 57.1063907398754 + ], + [ + 8.783946155715057, + 57.10480372275003 + ], + [ + 8.874359575166626, + 57.11611568016025 + ], + [ + 8.914561392146, + 57.12742745245742 + ], + [ + 8.962575712992873, + 57.15656154042745 + ], + [ + 8.980479364164603, + 57.16103751666595 + ], + [ + 9.230642124980795, + 57.14109937528141 + ], + [ + 9.321543818177306, + 57.14704013242578 + ], + [ + 9.41211998301237, + 57.165431973072266 + ], + [ + 9.49781334921667, + 57.19700758037148 + ], + [ + 9.573252805895546, + 57.24290606150845 + ], + [ + 9.788422068857306, + 57.45929594142355 + ], + [ + 9.826426622084604, + 57.489935535048744 + ], + [ + 9.89535566188599, + 57.531561555731564 + ], + [ + 9.929860868015943, 
+ 57.567450205798245 + ], + [ + 9.943125844089257, + 57.572495802224694 + ], + [ + 9.96697024625269, + 57.59137602438129 + ], + [ + 10.015391466633195, + 57.59662500704154 + ], + [ + 10.107676630245914, + 57.5929629717925 + ], + [ + 10.197601752086225, + 57.60122297314783 + ], + [ + 10.27955163042313, + 57.620917074480516 + ], + [ + 10.350596543789212, + 57.648993163945725 + ], + [ + 10.472504102440322, + 57.716986399689986 + ], + [ + 10.539073123548665, + 57.74384197559005 + ], + [ + 10.566091335304085, + 57.74945702735636 + ], + [ + 10.596039246679975, + 57.751165953393574 + ], + [ + 10.625010623569203, + 57.747504083893105 + ], + [ + 10.648936405789948, + 57.73700605045504 + ], + [ + 10.594411659178583, + 57.725897571991815 + ], + [ + 10.54249108070871, + 57.698919975328494 + ], + [ + 10.460459829101179, + 57.630560583017406 + ], + [ + 10.430430540016054, + 57.57054276384266 + ], + [ + 10.458343943742024, + 57.52545805079961 + ], + [ + 10.5089624423464, + 57.484035616757964 + ], + [ + 10.546560092322496, + 57.43528880743455 + ], + [ + 10.517425981356647, + 57.39130284858253 + ], + [ + 10.51791425630189, + 57.33917874911419 + ], + [ + 10.539073116234418, + 57.236721130644064 + ], + [ + 10.521983274402015, + 57.221747194977425 + ], + [ + 10.470876505300469, + 57.20115802377594 + ], + [ + 10.446787954934406, + 57.184881880649705 + ], + [ + 10.41146894625541, + 57.14443594277496 + ], + [ + 10.395518425172973, + 57.117254951883034 + ], + [ + 10.384043811583865, + 57.07420477179594 + ], + [ + 10.35922284794932, + 57.02610907262601 + ], + [ + 10.347178577715914, + 57.01019923899576 + ], + [ + 10.338715037560112, + 57.00324125120324 + ], + [ + 10.330577018513987, + 56.99933502265667 + ], + [ + 10.319672077093282, + 56.99750404005188 + ], + [ + 10.272471548787905, + 56.99787017059957 + ], + [ + 10.25798587590796, + 56.99583571119346 + ], + [ + 10.239756713812723, + 56.992824686562656 + ], + [ + 10.15601647584315, + 57.02383051394371 + ], + [ + 10.094004752469694, + 
57.06049223780963 + ], + [ + 10.052744988464926, + 57.069728913241676 + ], + [ + 10.019379104210397, + 57.08844638099549 + ], + [ + 9.994639518823382, + 57.09271881499704 + ], + [ + 9.974457225044386, + 57.08649320878353 + ], + [ + 9.946136921951027, + 57.06118406523756 + ], + [ + 9.926931183396446, + 57.058579790260886 + ], + [ + 9.94548587611335, + 57.05731846249587 + ], + [ + 9.966563351090022, + 57.0722110515149 + ], + [ + 9.984141468531915, + 57.07904690263256 + ], + [ + 10.001963733615304, + 57.083889015322804 + ], + [ + 10.022634310099294, + 57.07908762181481 + ], + [ + 10.12126712738605, + 57.02212153518102 + ], + [ + 10.153981974072328, + 57.013251123064755 + ], + [ + 10.192393424078409, + 56.994045304730015 + ], + [ + 10.214366081261776, + 56.98969146479006 + ], + [ + 10.295095245040725, + 56.988592808349594 + ], + [ + 10.31373130941015, + 56.982855463413735 + ], + [ + 10.285655144008023, + 56.96369049708775 + ], + [ + 10.27955162265674, + 56.95551171584963 + ], + [ + 10.2747501931051, + 56.942287469959346 + ], + [ + 10.269297727983616, + 56.91502519830449 + ], + [ + 10.265879756860823, + 56.90770094571675 + ], + [ + 10.273122595887296, + 56.891343535084744 + ], + [ + 10.284027532911585, + 56.8403180224169 + ], + [ + 10.288340684608563, + 56.80011620710157 + ], + [ + 10.295420761969932, + 56.778875966029474 + ], + [ + 10.319997595966095, + 56.73647699892911 + ], + [ + 10.328461138857866, + 56.72980383177139 + ], + [ + 10.338226756182637, + 56.72394437183867 + ], + [ + 10.340586788492509, + 56.71735264462469 + ], + [ + 10.326833529992411, + 56.7085635439155 + ], + [ + 10.318695510216426, + 56.70823803648404 + ], + [ + 10.295176633413805, + 56.71430089703291 + ], + [ + 10.282562694792297, + 56.71596912956676 + ], + [ + 10.237559439797598, + 56.71596912956676 + ], + [ + 10.221853057010126, + 56.712103538354846 + ], + [ + 10.203868030052833, + 56.70233790253155 + ], + [ + 10.187022326060728, + 56.712225586314645 + ], + [ + 10.168142120581265, + 
56.72003812579755 + ], + [ + 10.146494987512652, + 56.72235748350683 + ], + [ + 10.121267121811226, + 56.71596912956676 + ], + [ + 10.09896894920698, + 56.706122173325866 + ], + [ + 10.089610223946876, + 56.70404696407359 + ], + [ + 10.073496936116044, + 56.70233790253155 + ], + [ + 10.058360219570032, + 56.69822822935727 + ], + [ + 10.027517122065499, + 56.68427154133272 + ], + [ + 9.990896028394332, + 56.67694731460128 + ], + [ + 9.945567247257936, + 56.65814842735746 + ], + [ + 9.805918813125894, + 56.64704007242005 + ], + [ + 9.8059188160099, + 56.640204169114746 + ], + [ + 9.851328973639628, + 56.635728276072385 + ], + [ + 10.001475452934299, + 56.661322286443976 + ], + [ + 10.0349227211484, + 56.672837622282955 + ], + [ + 10.049571156368094, + 56.6749941666245 + ], + [ + 10.052907744111653, + 56.67715073872065 + ], + [ + 10.053477415171583, + 56.681830203880615 + ], + [ + 10.055430532508321, + 56.68650947863871 + ], + [ + 10.062836130080862, + 56.68866603842685 + ], + [ + 10.07097415206388, + 56.687486036632905 + ], + [ + 10.084320506657336, + 56.68235909040863 + ], + [ + 10.090586781531904, + 56.68121975465745 + ], + [ + 10.101328978273054, + 56.68268470808155 + ], + [ + 10.113617380144698, + 56.68671291437081 + ], + [ + 10.12378990426307, + 56.692328127151626 + ], + [ + 10.128103066973903, + 56.6988793664857 + ], + [ + 10.134776238897338, + 56.71238842412603 + ], + [ + 10.150564000449258, + 56.71442294659631 + ], + [ + 10.183360221992565, + 56.7085635439155 + ], + [ + 10.180837434851979, + 56.704901421099656 + ], + [ + 10.17579186623692, + 56.695461366397815 + ], + [ + 10.193207225917762, + 56.69358957284981 + ] + ] + ], + [ + [ + [ + 8.825856969761675, + 56.74384189976032 + ], + [ + 8.812673378665838, + 56.7345645934932 + ], + [ + 8.771250845969831, + 56.71596912956676 + ], + [ + 8.766123894216294, + 56.69871654182198 + ], + [ + 8.686371289849399, + 56.68659088501487 + ], + [ + 8.658376502289403, + 56.6781274027111 + ], + [ + 8.648448118645883, + 
56.6810571077253 + ], + [ + 8.62175540132782, + 56.713934588286094 + ], + [ + 8.610850454396903, + 56.722154004430884 + ], + [ + 8.586273637299831, + 56.73647699892911 + ], + [ + 8.56226647450189, + 56.743231545393705 + ], + [ + 8.555918818677247, + 56.74384189976032 + ], + [ + 8.524912962136634, + 56.73729082587258 + ], + [ + 8.51465904501759, + 56.738226563114196 + ], + [ + 8.518077021320037, + 56.750067481121675 + ], + [ + 8.522797071260046, + 56.75507233648518 + ], + [ + 8.530284053862667, + 56.75934484572938 + ], + [ + 8.539235878677434, + 56.7625186979483 + ], + [ + 8.548675981815718, + 56.763739389347506 + ], + [ + 8.553233271403515, + 56.76813388809437 + ], + [ + 8.551605665700956, + 56.77777742035894 + ], + [ + 8.548024941480847, + 56.78725827819759 + ], + [ + 8.545258005929366, + 56.791083033980705 + ], + [ + 8.573008658771343, + 56.80255766764401 + ], + [ + 8.643809437635438, + 56.808050803585424 + ], + [ + 8.668793168758866, + 56.82583247345247 + ], + [ + 8.656260611032152, + 56.82514066607107 + ], + [ + 8.643809442234117, + 56.825873130282424 + ], + [ + 8.632009311119157, + 56.82807038157371 + ], + [ + 8.621104357469092, + 56.832017246840564 + ], + [ + 8.6267195968653, + 56.841538802218736 + ], + [ + 8.6467391341718, + 56.85635007016282 + ], + [ + 8.655121290684125, + 56.86684805800479 + ], + [ + 8.651866081506084, + 56.86603424049468 + ], + [ + 8.649750193195297, + 56.87425361106286 + ], + [ + 8.64844811291625, + 56.89069244889631 + ], + [ + 8.651621940004175, + 56.89419178589387 + ], + [ + 8.659353063425995, + 56.894476661896704 + ], + [ + 8.668223503385242, + 56.89378488992415 + ], + [ + 8.675059442506658, + 56.89411043877302 + ], + [ + 8.70378665196756, + 56.90005113433761 + ], + [ + 8.762461783754013, + 56.905422251813754 + ], + [ + 8.791758663322138, + 56.91453689399943 + ], + [ + 8.813649935876292, + 56.91034576840197 + ], + [ + 8.833750849708155, + 56.92475010187667 + ], + [ + 8.867442253361073, + 56.96295806680949 + ], + [ + 8.89039146858317, 
+ 56.95482001030748 + ], + [ + 8.905039913766345, + 56.96600988106698 + ], + [ + 8.916270376054399, + 56.97907131846535 + ], + [ + 8.928884314226604, + 56.976629991058175 + ], + [ + 8.923106314335104, + 56.968573287795095 + ], + [ + 8.911387571526602, + 56.95612220749948 + ], + [ + 8.908457878292484, + 56.94871652996888 + ], + [ + 8.910411008579791, + 56.94440344544499 + ], + [ + 8.920420767962002, + 56.930487358762576 + ], + [ + 8.922699416490154, + 56.922023849000574 + ], + [ + 8.920420773233658, + 56.91990799900012 + ], + [ + 8.908457882794385, + 56.88727452950182 + ], + [ + 8.90040123690991, + 56.87482329504766 + ], + [ + 8.895274284874622, + 56.87030670639326 + ], + [ + 8.887950066702551, + 56.86684805800479 + ], + [ + 8.887705931117841, + 56.87872963740132 + ], + [ + 8.882660349616154, + 56.88637926843774 + ], + [ + 8.875010611930213, + 56.89109933727384 + ], + [ + 8.867442255540064, + 56.89411043877302 + ], + [ + 8.843760615967915, + 56.88344964921835 + ], + [ + 8.839610223185392, + 56.87982820727238 + ], + [ + 8.83643640016492, + 56.86928953448189 + ], + [ + 8.839040555035833, + 56.86286034211504 + ], + [ + 8.843597853262972, + 56.85822176327372 + ], + [ + 8.847015826836552, + 56.8530948691891 + ], + [ + 8.855153840189603, + 56.81976957467872 + ], + [ + 8.86394290915155, + 56.809515745438546 + ], + [ + 8.881114132841779, + 56.80536535136014 + ], + [ + 8.874766473001845, + 56.80036043188197 + ], + [ + 8.868418815326491, + 56.7980817643313 + ], + [ + 8.86133874037303, + 56.79755283256295 + ], + [ + 8.853282102551468, + 56.797837703564795 + ], + [ + 8.860687690766337, + 56.79490787758155 + ], + [ + 8.867442250813605, + 56.791083033980705 + ], + [ + 8.85417728371998, + 56.77993403397182 + ], + [ + 8.839121942538183, + 56.75726961788023 + ], + [ + 8.825856969761675, + 56.74384189976032 + ] + ] + ], + [ + [ + [ + 11.142344592013188, + 57.332342790595156 + ], + [ + 11.16993248552196, + 57.33270902701456 + ], + [ + 11.181488479074925, + 57.33030835589918 + ], + [ + 
11.190114785573531, + 57.32550695910287 + ], + [ + 11.197276232006054, + 57.31338119168454 + ], + [ + 11.196501701216176, + 57.30282124550524 + ], + [ + 11.18911338963492, + 57.29428256779825 + ], + [ + 11.166875951785547, + 57.296775050740784 + ], + [ + 11.143231241170417, + 57.30135125618067 + ], + [ + 11.066579629887661, + 57.29132728616404 + ], + [ + 11.08269291039726, + 57.283880981911565 + ], + [ + 11.094164610441222, + 57.26899600315104 + ], + [ + 11.09110072083318, + 57.252784136695496 + ], + [ + 11.06716630688235, + 57.238361948600286 + ], + [ + 11.053517090142824, + 57.22620208381221 + ], + [ + 11.045137616154513, + 57.21271488076637 + ], + [ + 11.028835403866951, + 57.201914227924966 + ], + [ + 10.997904584524852, + 57.19791000976349 + ], + [ + 10.977961917198284, + 57.20673291716783 + ], + [ + 10.951814885380303, + 57.23372968091941 + ], + [ + 10.916536637386958, + 57.23736859527434 + ], + [ + 10.888799322871035, + 57.245344107837155 + ], + [ + 10.871104363000443, + 57.25462474200373 + ], + [ + 10.854991083793413, + 57.26463452563711 + ], + [ + 10.854991080001577, + 57.27090076223399 + ], + [ + 10.888424553471385, + 57.274864878955384 + ], + [ + 10.921263282411266, + 57.29228003550066 + ], + [ + 10.946601230285697, + 57.3046432384645 + ], + [ + 10.986869593747608, + 57.30599270295381 + ], + [ + 11.010083618981037, + 57.30987338244718 + ], + [ + 11.027723185323206, + 57.320690553163026 + ], + [ + 11.101927417912405, + 57.32041686322719 + ], + [ + 11.142344592013188, + 57.332342790595156 + ] + ] + ] + ] + }, + "type": "Feature", + "properties": { + "source": "https://simplemaps.com", + "id": "DK81", + "name": "Nordjylland" + }, + "id": 3 + }, + { + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 11.845225460623086, + 54.731919700839164 + ], + [ + 11.833832223326482, + 54.72455471760293 + ], + [ + 11.82357830960239, + 54.72211323186349 + ], + [ + 11.814707868141676, + 54.71869526926492 + ], + [ + 11.806976772532268, + 
54.70799402013389 + ], + [ + 11.846446167708578, + 54.69782143444661 + ], + [ + 11.84262129388001, + 54.67621495052116 + ], + [ + 11.810720241309175, + 54.65509667862272 + ], + [ + 11.766123906274881, + 54.6465519509475 + ], + [ + 11.610524932693899, + 54.66640856164108 + ], + [ + 11.5600692192957, + 54.659613476363404 + ], + [ + 11.512217656009998, + 54.6465519509475 + ], + [ + 11.474375856144098, + 54.62612548926378 + ], + [ + 11.45183351687243, + 54.628363209535415 + ], + [ + 11.358734583832662, + 54.66054935734039 + ], + [ + 11.297699420342623, + 54.69155513701995 + ], + [ + 11.187754757942226, + 54.73224522643956 + ], + [ + 11.138194215322757, + 54.73859293098146 + ], + [ + 11.084646021939323, + 54.753119120464696 + ], + [ + 11.030528194394009, + 54.76064691402952 + ], + [ + 11.009287963593168, + 54.77163327705111 + ], + [ + 10.999196797588041, + 54.786769777356184 + ], + [ + 11.00570721411709, + 54.80365617628867 + ], + [ + 11.030772325703206, + 54.81216048039607 + ], + [ + 11.06128989811818, + 54.80931211289472 + ], + [ + 11.08969161309564, + 54.81061444524824 + ], + [ + 11.108164902275016, + 54.83152895546649 + ], + [ + 11.09197025311721, + 54.835028442409026 + ], + [ + 11.078379745481078, + 54.841131810657885 + ], + [ + 11.025238487825996, + 54.879462098791386 + ], + [ + 11.018809449453293, + 54.886175940011945 + ], + [ + 11.018809438369066, + 54.903387733375105 + ], + [ + 11.03239994658802, + 54.91925701800389 + ], + [ + 11.073985230438417, + 54.948187658239775 + ], + [ + 11.088145372108036, + 54.944159172566664 + ], + [ + 11.231700077055248, + 54.96124920881458 + ], + [ + 11.25603274357512, + 54.954575866937105 + ], + [ + 11.303477410494047, + 54.9324405022349 + ], + [ + 11.321055545974465, + 54.92772056113519 + ], + [ + 11.338877810252818, + 54.91913494135289 + ], + [ + 11.36890708354355, + 54.88108944301853 + ], + [ + 11.392751509402464, + 54.872504093084274 + ], + [ + 11.416758674506582, + 54.86933029746676 + ], + [ + 11.594737161378692, + 
54.811102464810304 + ], + [ + 11.585134305907584, + 54.82851791819645 + ], + [ + 11.573578315827504, + 54.83934148532813 + ], + [ + 11.572764522148955, + 54.84796787366821 + ], + [ + 11.594737163375743, + 54.85887266866664 + ], + [ + 11.622080913348329, + 54.85887266866664 + ], + [ + 11.646332237296038, + 54.86538331109131 + ], + [ + 11.656260602627857, + 54.86627828021659 + ], + [ + 11.647227398102201, + 54.877142522450825 + ], + [ + 11.644786004067926, + 54.88963450769874 + ], + [ + 11.646332228097899, + 54.90228914130091 + ], + [ + 11.649424665775003, + 54.913519503092054 + ], + [ + 11.725759305988953, + 54.87335841472575 + ], + [ + 11.731944195238176, + 54.85887266866664 + ], + [ + 11.755625837960297, + 54.841131810657885 + ], + [ + 11.770681189424478, + 54.83429600094265 + ], + [ + 11.7872827398027, + 54.83152895546649 + ], + [ + 11.777191614027863, + 54.82599530669044 + ], + [ + 11.772959840098588, + 54.82469318364205 + ], + [ + 11.78760826368005, + 54.806870780752284 + ], + [ + 11.837738478075808, + 54.78278230492996 + ], + [ + 11.848643412699744, + 54.77317930410614 + ], + [ + 11.85132897075778, + 54.756821994383195 + ], + [ + 11.85547935091771, + 54.749456975310345 + ], + [ + 11.85547936009498, + 54.74359769349557 + ], + [ + 11.845225460623086, + 54.731919700839164 + ] + ] + ], + [ + [ + [ + 12.417165560903817, + 55.03021881607416 + ], + [ + 12.504730664936336, + 55.01976145939095 + ], + [ + 12.53760826900682, + 55.002386786064235 + ], + [ + 12.55372154390337, + 54.96808496988032 + ], + [ + 12.551768422221377, + 54.957180053498845 + ], + [ + 12.542653852731997, + 54.95115804744394 + ], + [ + 12.526215027097674, + 54.948675724667865 + ], + [ + 12.50277754952713, + 54.948187658239775 + ], + [ + 12.384613477671735, + 54.96308015149127 + ], + [ + 12.348399295061654, + 54.9600284845324 + ], + [ + 12.313975460963622, + 54.9508731525377 + ], + [ + 12.280039906145827, + 54.9345563444049 + ], + [ + 12.226817255540526, + 54.89264559010829 + ], + [ + 
12.20777429436542, + 54.886175940011945 + ], + [ + 12.178721557317544, + 54.88898353805816 + ], + [ + 12.115000834841005, + 54.90729389169413 + ], + [ + 12.115000837406315, + 54.913519503092054 + ], + [ + 12.129160994395093, + 54.913519503092054 + ], + [ + 12.124685100600102, + 54.92373289998612 + ], + [ + 12.128916853482357, + 54.931626600998236 + ], + [ + 12.138194209849308, + 54.9372419571147 + ], + [ + 12.149099151473372, + 54.940822623235334 + ], + [ + 12.144053567516108, + 54.94379289317412 + ], + [ + 12.13453208548344, + 54.95160542236959 + ], + [ + 12.129160995262616, + 54.95441306675911 + ], + [ + 12.151377788317765, + 54.96889871494803 + ], + [ + 12.164398619729067, + 54.97264217961834 + ], + [ + 12.177012560086311, + 54.96808496988032 + ], + [ + 12.184336776929486, + 54.975531237746765 + ], + [ + 12.177012559396879, + 54.98920312714131 + ], + [ + 12.193125835201323, + 54.98834858425947 + ], + [ + 12.217295781128865, + 54.97866457377333 + ], + [ + 12.232188338897794, + 54.975531237746765 + ], + [ + 12.242198113041608, + 54.97662995040588 + ], + [ + 12.27475021217615, + 54.984442607641455 + ], + [ + 12.286875840337306, + 54.98920312714131 + ], + [ + 12.281504758105072, + 54.99909092696695 + ], + [ + 12.28126061298283, + 55.007147527832096 + ], + [ + 12.285655143943591, + 55.013128972450346 + ], + [ + 12.294200066026246, + 55.01654694225437 + ], + [ + 12.294200066112452, + 55.02338288109222 + ], + [ + 12.261566602115444, + 55.03681061512405 + ], + [ + 12.252696159890135, + 55.0438500009283 + ], + [ + 12.247569207110306, + 55.05385976907461 + ], + [ + 12.250987174882193, + 55.0607363938512 + ], + [ + 12.261973504159947, + 55.064276435556344 + ], + [ + 12.280039910160186, + 55.064276435556344 + ], + [ + 12.30176842515586, + 55.042059638512434 + ], + [ + 12.335297070956194, + 55.033270574575106 + ], + [ + 12.417165560903817, + 55.03021881607416 + ] + ] + ], + [ + [ + [ + 12.359205489553343, + 55.60459309122142 + ], + [ + 12.331309441686834, + 55.59540437055523 
+ ], + [ + 12.31202233376881, + 55.58397046191442 + ], + [ + 12.248383008653981, + 55.54625071466998 + ], + [ + 12.248220248456937, + 55.546128652396355 + ], + [ + 12.19809004091944, + 55.48749421689665 + ], + [ + 12.22486412957257, + 55.43488190853541 + ], + [ + 12.273448112901432, + 55.41022369904938 + ], + [ + 12.290537957949557, + 55.40692781113812 + ], + [ + 12.352061392945194, + 55.404445694897305 + ], + [ + 12.368825715988994, + 55.40009185432919 + ], + [ + 12.412119985986495, + 55.37913644079626 + ], + [ + 12.434743684535524, + 55.36367420705257 + ], + [ + 12.444590690035609, + 55.348863013818445 + ], + [ + 12.446299675048309, + 55.331691799459215 + ], + [ + 12.450856967719215, + 55.31720612884864 + ], + [ + 12.46501712304485, + 55.290269273426425 + ], + [ + 12.450205924410247, + 55.27968984037714 + ], + [ + 12.444590691068491, + 55.2765567086525 + ], + [ + 12.392425976521851, + 55.25531646841701 + ], + [ + 12.182790560150604, + 55.227199602713156 + ], + [ + 12.167002800192483, + 55.21735260787991 + ], + [ + 12.141368034333517, + 55.207993875471686 + ], + [ + 12.121104363201272, + 55.191066800974646 + ], + [ + 12.109060091757371, + 55.17072174461459 + ], + [ + 12.115000846889258, + 55.1537132819106 + ], + [ + 12.107432488248671, + 55.14508698344561 + ], + [ + 12.098968945712947, + 55.14142486917319 + ], + [ + 12.09009850363263, + 55.142238670379705 + ], + [ + 12.080739779925821, + 55.14687734526829 + ], + [ + 12.08814537916541, + 55.16836172262812 + ], + [ + 12.076670769550468, + 55.18195222142304 + ], + [ + 12.056407096758033, + 55.185695702611234 + ], + [ + 12.036794466094433, + 55.17755767504625 + ], + [ + 12.02784264348068, + 55.17267486686016 + ], + [ + 12.020030144927066, + 55.170965896179474 + ], + [ + 12.014659050893867, + 55.167466547855454 + ], + [ + 12.012461785034107, + 55.15713125233733 + ], + [ + 12.017914259067021, + 55.1530215526635 + ], + [ + 12.046641472120578, + 55.140082099191496 + ], + [ + 12.130544466824041, + 55.135891016271344 + ], + 
[ + 12.168630404778035, + 55.12848541682601 + ], + [ + 12.177012565590688, + 55.10529205499124 + ], + [ + 12.171397332175514, + 55.0954450557191 + ], + [ + 12.165049675448072, + 55.0912946683926 + ], + [ + 12.157725456721337, + 55.089178775265076 + ], + [ + 12.14909915474574, + 55.08539459550287 + ], + [ + 12.128428582052921, + 55.078843492519844 + ], + [ + 12.118500196177802, + 55.07355377975029 + ], + [ + 12.125743035246046, + 55.0711123744209 + ], + [ + 12.163340691111259, + 55.02338288109222 + ], + [ + 12.162282748004234, + 55.00438060104112 + ], + [ + 12.148936384941022, + 54.994126605825144 + ], + [ + 12.128428577349311, + 54.98997625813666 + ], + [ + 12.104991075435928, + 54.98920312714131 + ], + [ + 12.089528844388004, + 54.98566317662728 + ], + [ + 12.055918802169598, + 54.97077043772056 + ], + [ + 12.032969591156272, + 54.96808496988032 + ], + [ + 11.998057483543878, + 54.976507835561854 + ], + [ + 11.943858269030692, + 55.00608958530637 + ], + [ + 11.91016686300681, + 55.00971100506752 + ], + [ + 11.90951582099313, + 55.00495026193042 + ], + [ + 11.911143425006369, + 55.00381094006302 + ], + [ + 11.913828971987568, + 55.003851629874376 + ], + [ + 11.916840039996298, + 55.00287506696223 + ], + [ + 11.9113875660031, + 55.00092194203031 + ], + [ + 11.908376509967857, + 54.99860272617312 + ], + [ + 11.904633018773572, + 54.9965681991756 + ], + [ + 11.897146018029318, + 54.995428657673976 + ], + [ + 11.901540552226553, + 54.97638572079925 + ], + [ + 11.894379095758357, + 54.96499251719451 + ], + [ + 11.885020388302207, + 54.95514566869857 + ], + [ + 11.882823109550706, + 54.940822623235334 + ], + [ + 11.890147331617431, + 54.92963287614468 + ], + [ + 11.903330928024944, + 54.92300042344825 + ], + [ + 11.91846763971696, + 54.921942406939635 + ], + [ + 11.931162968565891, + 54.92772056113519 + ], + [ + 11.928558777785014, + 54.93138243133094 + ], + [ + 11.923675973538836, + 54.940822623235334 + ], + [ + 11.932383667628544, + 54.94147377656314 + ], + [ + 
11.957855674111904, + 54.948187658239775 + ], + [ + 11.965993681953197, + 54.94261298249913 + ], + [ + 11.980235234662349, + 54.92426191263591 + ], + [ + 11.982188332514237, + 54.92031470113451 + ], + [ + 11.991465689476021, + 54.91632720076877 + ], + [ + 12.023936385407016, + 54.89801658331203 + ], + [ + 12.040375194146172, + 54.89301176753617 + ], + [ + 12.058929870458066, + 54.8943544243411 + ], + [ + 12.078298365472063, + 54.897853833270304 + ], + [ + 12.096446150858279, + 54.898097958386884 + ], + [ + 12.111827019507949, + 54.88959382202874 + ], + [ + 12.139903180514333, + 54.86493551034474 + ], + [ + 12.170176622766075, + 54.84454981049402 + ], + [ + 12.170176631036847, + 54.8383243060986 + ], + [ + 12.081716342933536, + 54.79645417828962 + ], + [ + 12.067149299453042, + 54.7868107580399 + ], + [ + 12.02800540569684, + 54.74355703997523 + ], + [ + 12.022715690562311, + 54.73529693761481 + ], + [ + 12.012705911989062, + 54.73110573054387 + ], + [ + 11.985850450053752, + 54.712306963159826 + ], + [ + 11.975271043725133, + 54.70799402013389 + ], + [ + 11.960703967756812, + 54.69407782422585 + ], + [ + 11.960215686975578, + 54.66233954439386 + ], + [ + 11.971446149741864, + 54.60561747643132 + ], + [ + 11.969004742733691, + 54.56976948411546 + ], + [ + 11.949066616400767, + 54.56858973081493 + ], + [ + 11.928233282145229, + 54.58397057827691 + ], + [ + 11.923675971145741, + 54.59813054164454 + ], + [ + 11.909353049365398, + 54.60952371193509 + ], + [ + 11.876638203817894, + 54.64695859200255 + ], + [ + 11.86915125062445, + 54.659613476363404 + ], + [ + 11.870616075192352, + 54.68036523282794 + ], + [ + 11.882660359861722, + 54.69643797276442 + ], + [ + 11.910166865058796, + 54.72166577782497 + ], + [ + 11.889903192205171, + 54.72418855621003 + ], + [ + 11.87582440914097, + 54.72972233857388 + ], + [ + 11.872325078063387, + 54.73993573635425 + ], + [ + 11.882823113686953, + 54.75641511295742 + ], + [ + 11.871429882281001, + 54.760321338572595 + ], + [ + 
11.86524499154493, + 54.765448344950556 + ], + [ + 11.855479363311744, + 54.77692292216238 + ], + [ + 11.809092653018478, + 54.804917801797004 + ], + [ + 11.805430542711507, + 54.80951577050808 + ], + [ + 11.80095460978061, + 54.817857029395824 + ], + [ + 11.798838724259534, + 54.825872974095205 + ], + [ + 11.800059446399633, + 54.83120357296399 + ], + [ + 11.799815306972512, + 54.83657468196597 + ], + [ + 11.793467637959045, + 54.84454981049402 + ], + [ + 11.776052281603485, + 54.85614656834425 + ], + [ + 11.760590040131047, + 54.863226631336644 + ], + [ + 11.749685091747752, + 54.87311432242379 + ], + [ + 11.745616080191573, + 54.89301176753617 + ], + [ + 11.745371928397926, + 54.91132220440013 + ], + [ + 11.742035342821978, + 54.92544170535037 + ], + [ + 11.731944213175325, + 54.93528886704727 + ], + [ + 11.711436390600463, + 54.940822623235334 + ], + [ + 11.711436402924152, + 54.948187658239775 + ], + [ + 11.749766456432706, + 54.965724843432746 + ], + [ + 11.759287951289178, + 54.96808496988032 + ], + [ + 11.773610868039578, + 54.96503321957469 + ], + [ + 11.79297937676343, + 54.951320844731924 + ], + [ + 11.803965699994658, + 54.948187658239775 + ], + [ + 11.814789258887432, + 54.952093815858696 + ], + [ + 11.837575713663643, + 54.966131869268324 + ], + [ + 11.841807503579913, + 54.96466721646516 + ], + [ + 11.848317890520025, + 54.95913305870945 + ], + [ + 11.862478047797072, + 54.96369036378688 + ], + [ + 11.882823105129203, + 54.975531237746765 + ], + [ + 11.882823116605525, + 54.98175693929783 + ], + [ + 11.872325075363035, + 54.98956949732511 + ], + [ + 11.853526237945724, + 55.02008698045201 + ], + [ + 11.841807487908317, + 55.03021881607416 + ], + [ + 11.774668816157412, + 55.04857005559562 + ], + [ + 11.755625846948723, + 55.05068593947829 + ], + [ + 11.75025475380844, + 55.052801823052626 + ], + [ + 11.736989779824334, + 55.062201237212925 + ], + [ + 11.728770379152994, + 55.064276435556344 + ], + [ + 11.665537956783496, + 55.06639231778394 + ], + [ 
+ 11.646657747804463, + 55.07233307299589 + ], + [ + 11.630381707136898, + 55.08112213840362 + ], + [ + 11.615896030054175, + 55.09223053655587 + ], + [ + 11.669606967271775, + 55.085638741777906 + ], + [ + 11.775238476761748, + 55.057440494583716 + ], + [ + 11.827484570948393, + 55.05068593947829 + ], + [ + 11.80534915496288, + 55.07387929862434 + ], + [ + 11.743418816226786, + 55.10053131930205 + ], + [ + 11.717621290039412, + 55.12571849240018 + ], + [ + 11.803965690567331, + 55.13483307063208 + ], + [ + 11.809580924837514, + 55.13886139336001 + ], + [ + 11.806976758927522, + 55.14687734526829 + ], + [ + 11.785411004135748, + 55.15721263337069 + ], + [ + 11.728526238216995, + 55.15477122820228 + ], + [ + 11.711436394461423, + 55.167914134689134 + ], + [ + 11.726247591141602, + 55.17694732428767 + ], + [ + 11.73471113369988, + 55.18866607995658 + ], + [ + 11.733083529464961, + 55.20014068657598 + ], + [ + 11.717621290853144, + 55.20831941165712 + ], + [ + 11.695078972729206, + 55.20807526941348 + ], + [ + 11.67823326890401, + 55.199367579021576 + ], + [ + 11.663828971479502, + 55.18829986668994 + ], + [ + 11.649424675430996, + 55.18097565740169 + ], + [ + 11.636241081791297, + 55.181626692864945 + ], + [ + 11.599375847016528, + 55.19131094016861 + ], + [ + 11.52418053410674, + 55.20209380778066 + ], + [ + 11.506032748837727, + 55.20831941165712 + ], + [ + 11.492360873621697, + 55.20416901843293 + ], + [ + 11.423187696785993, + 55.2219099061782 + ], + [ + 11.410329622600706, + 55.220119528839625 + ], + [ + 11.382985872915475, + 55.21116770911658 + ], + [ + 11.36890709782778, + 55.20831941165712 + ], + [ + 11.299978061822719, + 55.20831941165712 + ], + [ + 11.299978061740706, + 55.20148347379554 + ], + [ + 11.320485873715903, + 55.1946475355221 + ], + [ + 11.285899284875034, + 55.19537994868205 + ], + [ + 11.26099694020241, + 55.20058827457524 + ], + [ + 11.245371940253325, + 55.21478912210494 + ], + [ + 11.238536003830053, + 55.242417708202645 + ], + [ + 
11.253103061527058, + 55.236273510564864 + ], + [ + 11.267832879605496, + 55.235907300384405 + ], + [ + 11.2800399108165, + 55.24233633859871 + ], + [ + 11.28630618519703, + 55.25608957655029 + ], + [ + 11.276052279163718, + 55.25161366519073 + ], + [ + 11.265391471627082, + 55.249457094067104 + ], + [ + 11.241953972799505, + 55.248683994446814 + ], + [ + 11.238780144823496, + 55.25413646870143 + ], + [ + 11.241547071700618, + 55.27850984039653 + ], + [ + 11.235118034819514, + 55.28400299509278 + ], + [ + 11.21713300851041, + 55.29022857782053 + ], + [ + 11.162119987692769, + 55.319159243735825 + ], + [ + 11.152842643463767, + 55.328436585299954 + ], + [ + 11.161957227315348, + 55.33087800034785 + ], + [ + 11.17644290506829, + 55.331854559723794 + ], + [ + 11.188243036078866, + 55.335272539427166 + ], + [ + 11.190114780025699, + 55.34544505427165 + ], + [ + 11.182302279411676, + 55.35040924076694 + ], + [ + 11.167979363291282, + 55.35276927608878 + ], + [ + 11.152598502213515, + 55.35321684502423 + ], + [ + 11.142344597622206, + 55.352280998613985 + ], + [ + 11.13542727982451, + 55.34686920613249 + ], + [ + 11.130869986958954, + 55.338446344915894 + ], + [ + 11.123545769068011, + 55.331854559723794 + ], + [ + 11.108164910067913, + 55.331854559723794 + ], + [ + 11.101898632508913, + 55.33633045508252 + ], + [ + 11.087738478802478, + 55.358547287253366 + ], + [ + 11.087738477452657, + 55.365952871833585 + ], + [ + 11.114593944821046, + 55.364406630438026 + ], + [ + 11.128916864352218, + 55.36517976938758 + ], + [ + 11.13892662793693, + 55.369370823698866 + ], + [ + 11.147715692449738, + 55.37592195739491 + ], + [ + 11.159190299796421, + 55.38129303383966 + ], + [ + 11.171722851791724, + 55.38507721379276 + ], + [ + 11.1832788436571, + 55.386420006536554 + ], + [ + 11.204356314555604, + 55.39183175774402 + ], + [ + 11.214203321572922, + 55.40379467004302 + ], + [ + 11.213145378163368, + 55.41575754517551 + ], + [ + 11.20093834739421, + 55.42121003216065 + ], + [ + 
11.20484459829513, + 55.431341877662916 + ], + [ + 11.196462434363138, + 55.45384347472771 + ], + [ + 11.176442905396529, + 55.489447337187244 + ], + [ + 11.163910351439373, + 55.50189849907388 + ], + [ + 11.151133661296738, + 55.510321369717325 + ], + [ + 11.135590041493469, + 55.51512280481872 + ], + [ + 11.115000846499239, + 55.51679107768524 + ], + [ + 11.097178580186652, + 55.50950753472209 + ], + [ + 11.08578535243469, + 55.5075544336253 + ], + [ + 11.080902538693184, + 55.513088269088 + ], + [ + 11.082774284855729, + 55.52655670646411 + ], + [ + 11.088552279424825, + 55.53384022988425 + ], + [ + 11.098155145501051, + 55.53668855994345 + ], + [ + 11.111582877548551, + 55.53729887960059 + ], + [ + 11.137950064129111, + 55.54291411020055 + ], + [ + 11.148448111862042, + 55.55731841497261 + ], + [ + 11.14568118661247, + 55.57611726147084 + ], + [ + 11.13209068915189, + 55.59528227445812 + ], + [ + 11.087901239809568, + 55.626939214192994 + ], + [ + 11.03923587259579, + 55.64345937269504 + ], + [ + 10.929942254051111, + 55.66014232054865 + ], + [ + 10.929942254229687, + 55.66762930146802 + ], + [ + 11.087738477051841, + 55.66014232054865 + ], + [ + 11.071055537640907, + 55.6769473610169 + ], + [ + 11.008636916723791, + 55.69228752038356 + ], + [ + 10.984629753302741, + 55.70111724754837 + ], + [ + 10.97429446804269, + 55.710842201149234 + ], + [ + 10.96827233373578, + 55.718410567566785 + ], + [ + 10.960215689223787, + 55.72435128498792 + ], + [ + 10.943614128486258, + 55.72907135049285 + ], + [ + 10.891449413292984, + 55.73126860361674 + ], + [ + 10.875498892465103, + 55.73651762844807 + ], + [ + 10.87549889061176, + 55.74274319446913 + ], + [ + 11.033050976482048, + 55.72907135049285 + ], + [ + 11.065684439619737, + 55.73224516936971 + ], + [ + 11.128184443165532, + 55.746323982814346 + ], + [ + 11.16277103044937, + 55.749497793718824 + ], + [ + 11.162771026522215, + 55.74274319446913 + ], + [ + 11.151703318215032, + 55.743638384717784 + ], + [ + 
11.141123893670155, + 55.74323150852776 + ], + [ + 11.13086998976822, + 55.74103426562653 + ], + [ + 11.121836783430332, + 55.73651762844807 + ], + [ + 11.137543164890147, + 55.71906158984218 + ], + [ + 11.163422074181126, + 55.70538974944428 + ], + [ + 11.192637567887518, + 55.70034416279445 + ], + [ + 11.218028190666626, + 55.708563540511534 + ], + [ + 11.176442904475289, + 55.72907135049285 + ], + [ + 11.190114778420654, + 55.73651762844807 + ], + [ + 11.199473507381192, + 55.73110589640942 + ], + [ + 11.216156448434358, + 55.72874586045725 + ], + [ + 11.252207878471767, + 55.72907135049285 + ], + [ + 11.268402538873948, + 55.73273345427919 + ], + [ + 11.302419468288994, + 55.74652742437044 + ], + [ + 11.336680532415349, + 55.7509219152723 + ], + [ + 11.352224157925917, + 55.75580478521013 + ], + [ + 11.364024286219097, + 55.76512280157037 + ], + [ + 11.368907094072032, + 55.779974646830766 + ], + [ + 11.369965039831863, + 55.787787175267454 + ], + [ + 11.374685095517313, + 55.80158115820151 + ], + [ + 11.375661653215527, + 55.807928759637534 + ], + [ + 11.372080928609416, + 55.8158633851437 + ], + [ + 11.354665563245073, + 55.83022697813047 + ], + [ + 11.348399286594058, + 55.83832430242831 + ], + [ + 11.37387129011109, + 55.82876211214201 + ], + [ + 11.398448116452467, + 55.82640212043672 + ], + [ + 11.421722852737085, + 55.83197663754851 + ], + [ + 11.443369989115974, + 55.84577058640335 + ], + [ + 11.477549678900688, + 55.84153884374579 + ], + [ + 11.505218944043222, + 55.868719774122404 + ], + [ + 11.510508663296642, + 55.908270608436816 + ], + [ + 11.477386911672125, + 55.9413515961765 + ], + [ + 11.448415561504708, + 55.948187572141286 + ], + [ + 11.384287957422004, + 55.95083242832225 + ], + [ + 11.30453535337036, + 55.96784090113025 + ], + [ + 11.28191165370975, + 55.978949272671926 + ], + [ + 11.272634310821141, + 55.996568099151304 + ], + [ + 11.400563999022541, + 55.961981522456576 + ], + [ + 11.426931186467694, + 55.96552155677095 + ], + [ + 
11.581065303262973, + 55.95184970985653 + ], + [ + 11.599375843892053, + 55.94208402074429 + ], + [ + 11.640879753545843, + 55.943426820448494 + ], + [ + 11.711436391587137, + 55.95563383197502 + ], + [ + 11.747813350211619, + 55.96824782086688 + ], + [ + 11.768321159506486, + 55.970526429111075 + ], + [ + 11.779795769892973, + 55.9618594508372 + ], + [ + 11.775401241317716, + 55.95184970985653 + ], + [ + 11.747325063598433, + 55.93280668414642 + ], + [ + 11.738780146366025, + 55.92084384052331 + ], + [ + 11.750498891653283, + 55.91038643668394 + ], + [ + 11.743988476746129, + 55.9037539704752 + ], + [ + 11.727875195707872, + 55.902289127090796 + ], + [ + 11.711436390421193, + 55.907171906321324 + ], + [ + 11.713552280855303, + 55.91083406052398 + ], + [ + 11.717621292361741, + 55.92084384052331 + ], + [ + 11.685801629605821, + 55.919867261050065 + ], + [ + 11.67286217531134, + 55.91592031811254 + ], + [ + 11.66309654643597, + 55.907171906321324 + ], + [ + 11.7126571005798, + 55.85809971272858 + ], + [ + 11.731618684020786, + 55.83071521626328 + ], + [ + 11.717621288470731, + 55.81098052072379 + ], + [ + 11.702403190897638, + 55.809475000975326 + ], + [ + 11.671234570729771, + 55.814276431289564 + ], + [ + 11.656260611478762, + 55.81098052072379 + ], + [ + 11.637950065575446, + 55.79608795372773 + ], + [ + 11.626231312786677, + 55.790594761632434 + ], + [ + 11.607758009686085, + 55.79047272992114 + ], + [ + 11.607758005685001, + 55.7830264005488 + ], + [ + 11.62427819178041, + 55.78001537486348 + ], + [ + 11.673594593666225, + 55.7830264005488 + ], + [ + 11.687022329049313, + 55.786444373429354 + ], + [ + 11.702810090195634, + 55.79340239294499 + ], + [ + 11.720388217438149, + 55.7986514383765 + ], + [ + 11.738780145430493, + 55.7973493652674 + ], + [ + 11.745453324799955, + 55.78937412888774 + ], + [ + 11.75586998988046, + 55.77293530774021 + ], + [ + 11.763682484324447, + 55.75682197037696 + ], + [ + 11.762705925473561, + 55.749497793718824 + ], + [ + 
11.682302283299082, + 55.74127842011318 + ], + [ + 11.664561394260977, + 55.7371279996229 + ], + [ + 11.65723717848307, + 55.73086175604403 + ], + [ + 11.669932489921235, + 55.72284578931272 + ], + [ + 11.68433678513369, + 55.7216250683415 + ], + [ + 11.729258658606266, + 55.72357817805957 + ], + [ + 11.738780147206834, + 55.7259789400447 + ], + [ + 11.746348502199954, + 55.731024463025584 + ], + [ + 11.763031447416793, + 55.721380941129695 + ], + [ + 11.787282747251759, + 55.70111724754837 + ], + [ + 11.794444209229011, + 55.687933683190366 + ], + [ + 11.793793164618812, + 55.6830101542037 + ], + [ + 11.78793379281189, + 55.6769473610169 + ], + [ + 11.779795769055085, + 55.66014232054865 + ], + [ + 11.7976180390053, + 55.66962315388267 + ], + [ + 11.81055748698889, + 55.67910389794443 + ], + [ + 11.852305533166701, + 55.73090239285704 + ], + [ + 11.854404201249086, + 55.73215351630363 + ], + [ + 11.855638462155492, + 55.73205068609124 + ], + [ + 11.893517283879806, + 55.733652624758726 + ], + [ + 11.905764598997523, + 55.736882413791534 + ], + [ + 11.907521598852634, + 55.739440403447716 + ], + [ + 11.909071894654756, + 55.74062899228263 + ], + [ + 11.92488488146605, + 55.73794180453806 + ], + [ + 11.97266686693785, + 55.72142165558555 + ], + [ + 11.965342646706738, + 55.70490146054979 + ], + [ + 11.958181189718621, + 55.69342686150855 + ], + [ + 11.923024939680078, + 55.67413975225489 + ], + [ + 11.910166863055695, + 55.66014232054865 + ], + [ + 11.930430533460264, + 55.65761951783294 + ], + [ + 11.959157747947033, + 55.6651065114792 + ], + [ + 11.982188346638525, + 55.678900454455984 + ], + [ + 11.985199411719364, + 55.69489163686437 + ], + [ + 12.001149938843394, + 55.69867587581191 + ], + [ + 12.017588739538805, + 55.69570548603143 + ], + [ + 12.031911651328809, + 55.68720120317136 + ], + [ + 12.040375194487208, + 55.67446522124108 + ], + [ + 12.009287958200504, + 55.68048738373947 + ], + [ + 12.005137563542096, + 55.67816801195262 + ], + [ + 
12.011403843544446, + 55.66901277710518 + ], + [ + 12.026621942196272, + 55.66205476668677 + ], + [ + 12.060313347306188, + 55.65399811698287 + ], + [ + 12.069102408731863, + 55.674953505657285 + ], + [ + 12.09205162920757, + 55.706732491013874 + ], + [ + 12.095062695432182, + 55.72907135049285 + ], + [ + 12.076088193567447, + 55.77892115166933 + ], + [ + 12.076296830281649, + 55.77892123152771 + ], + [ + 12.112935421647217, + 55.77881789478984 + ], + [ + 12.151589390997403, + 55.75757885450273 + ], + [ + 12.16786746484549, + 55.748561293969836 + ], + [ + 12.240472855667889, + 55.741249118363235 + ], + [ + 12.22667524467127, + 55.73380766988351 + ], + [ + 12.228380566775812, + 55.72838165083446 + ], + [ + 12.230550979544354, + 55.72584952862235 + ], + [ + 12.232876418085002, + 55.72347239903002 + ], + [ + 12.236442089109945, + 55.72119860434869 + ], + [ + 12.255717397844352, + 55.7145323448421 + ], + [ + 12.258042840268807, + 55.712206937012766 + ], + [ + 12.254942249755779, + 55.70776272342719 + ], + [ + 12.215668167435675, + 55.691484701978915 + ], + [ + 12.192672150345633, + 55.686342855716745 + ], + [ + 12.157945590272014, + 55.67055572237396 + ], + [ + 12.165542027708844, + 55.66430289898831 + ], + [ + 12.166317174396083, + 55.662442541528854 + ], + [ + 12.167092319578037, + 55.660168775752645 + ], + [ + 12.166627230340989, + 55.65771412477824 + ], + [ + 12.165076940026612, + 55.65469107687858 + ], + [ + 12.163836704744895, + 55.65071199109456 + ], + [ + 12.162596471078688, + 55.64373567000555 + ], + [ + 12.164456822637751, + 55.638413002986375 + ], + [ + 12.16910769943336, + 55.63404633094207 + ], + [ + 12.17019290475286, + 55.62221242534733 + ], + [ + 12.201095412256285, + 55.61428009919232 + ], + [ + 12.220164015831859, + 55.61490022010679 + ], + [ + 12.23985273542413, + 55.62156646767534 + ], + [ + 12.251996697700063, + 55.616347150761534 + ], + [ + 12.281297236281251, + 55.61898266056878 + ], + [ + 12.316695592103654, + 55.61655387320317 + ], + [ + 
12.359121940002405, + 55.604616586275746 + ], + [ + 12.359205489553343, + 55.60459309122142 + ] + ] + ] + ] + }, + "type": "Feature", + "properties": { + "source": "https://simplemaps.com", + "id": "DK85", + "name": "Sjaælland" + }, + "id": 4 + }, + { + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 11.97266686693785, + 55.72142165558555 + ], + [ + 11.92488488146605, + 55.73794180453806 + ], + [ + 11.909071894654756, + 55.74062899228263 + ], + [ + 11.907521598852634, + 55.739440403447716 + ], + [ + 11.905764598997523, + 55.736882413791534 + ], + [ + 11.893517283879806, + 55.733652624758726 + ], + [ + 11.855638462155492, + 55.73205068609124 + ], + [ + 11.854404201249086, + 55.73215351630363 + ], + [ + 11.85547935922123, + 55.73281480861671 + ], + [ + 11.853282095926788, + 55.73997629338215 + ], + [ + 11.844004750473028, + 55.75267158209198 + ], + [ + 11.84180749178994, + 55.75641514351241 + ], + [ + 11.844981314322053, + 55.767889699400115 + ], + [ + 11.852712435146506, + 55.78229400356512 + ], + [ + 11.869151235990493, + 55.80475493017968 + ], + [ + 11.884043818869664, + 55.813666111260304 + ], + [ + 11.919444209463188, + 55.81940341118172 + ], + [ + 11.937347855385827, + 55.82526279518501 + ], + [ + 11.953623892887538, + 55.853501684118406 + ], + [ + 11.933116085262888, + 55.88613518994075 + ], + [ + 11.909515824890143, + 55.915472761127745 + ], + [ + 11.91684004376976, + 55.93451572890702 + ], + [ + 11.934743686453116, + 55.933661204429285 + ], + [ + 11.969981316259638, + 55.91258372853133 + ], + [ + 11.988617380336466, + 55.907171906321324 + ], + [ + 11.998220246209474, + 55.90281808357406 + ], + [ + 11.997894724984262, + 55.89276762537649 + ], + [ + 11.993825713441622, + 55.881537142337585 + ], + [ + 11.992035350516625, + 55.873724662544774 + ], + [ + 11.996348504229038, + 55.86107005623102 + ], + [ + 12.000173374354311, + 55.85651277519341 + ], + [ + 12.004893426425802, + 55.85346101888511 + ], + [ + 12.012461786171466, + 
55.84577058640335 + ], + [ + 12.031260612244127, + 55.816595762646806 + ], + [ + 12.049001500090608, + 55.77533602532673 + ], + [ + 12.051931184614052, + 55.73663970251359 + ], + [ + 12.026133656947984, + 55.715399451221316 + ], + [ + 11.99000084644076, + 55.722805074527386 + ], + [ + 11.97266686693785, + 55.72142165558555 + ] + ] + ], + [ + [ + [ + 12.359205489553343, + 55.60459309122142 + ], + [ + 12.359121940002405, + 55.604616586275746 + ], + [ + 12.316695592103654, + 55.61655387320317 + ], + [ + 12.281297236281251, + 55.61898266056878 + ], + [ + 12.251996697700063, + 55.616347150761534 + ], + [ + 12.23985273542413, + 55.62156646767534 + ], + [ + 12.220164015831859, + 55.61490022010679 + ], + [ + 12.201095412256285, + 55.61428009919232 + ], + [ + 12.17019290475286, + 55.62221242534733 + ], + [ + 12.16910769943336, + 55.63404633094207 + ], + [ + 12.164456822637751, + 55.638413002986375 + ], + [ + 12.162596471078688, + 55.64373567000555 + ], + [ + 12.163836704744895, + 55.65071199109456 + ], + [ + 12.165076940026612, + 55.65469107687858 + ], + [ + 12.166627230340989, + 55.65771412477824 + ], + [ + 12.167092319578037, + 55.660168775752645 + ], + [ + 12.166317174396083, + 55.662442541528854 + ], + [ + 12.165542027708844, + 55.66430289898831 + ], + [ + 12.157945590272014, + 55.67055572237396 + ], + [ + 12.192672150345633, + 55.686342855716745 + ], + [ + 12.215668167435675, + 55.691484701978915 + ], + [ + 12.254942249755779, + 55.70776272342719 + ], + [ + 12.258042840268807, + 55.712206937012766 + ], + [ + 12.255717397844352, + 55.7145323448421 + ], + [ + 12.236442089109945, + 55.72119860434869 + ], + [ + 12.232876418085002, + 55.72347239903002 + ], + [ + 12.230550979544354, + 55.72584952862235 + ], + [ + 12.228380566775812, + 55.72838165083446 + ], + [ + 12.22667524467127, + 55.73380766988351 + ], + [ + 12.240472855667889, + 55.741249118363235 + ], + [ + 12.16786746484549, + 55.748561293969836 + ], + [ + 12.151589390997403, + 55.75757885450273 + ], + [ + 
12.112935421647217, + 55.77881789478984 + ], + [ + 12.076296830281649, + 55.77892123152771 + ], + [ + 12.076088193567447, + 55.77892115166933 + ], + [ + 12.053477411693088, + 55.83832430242831 + ], + [ + 12.053477408509035, + 55.873724662544774 + ], + [ + 12.04981529655972, + 55.8880068696489 + ], + [ + 12.026133656513036, + 55.9413515961765 + ], + [ + 12.01059003853187, + 55.95526763074563 + ], + [ + 11.992442254012786, + 55.96059804912358 + ], + [ + 11.970713737951074, + 55.95986562652268 + ], + [ + 11.894297724884726, + 55.942287530289725 + ], + [ + 11.865489132264011, + 55.9393578470896 + ], + [ + 11.848643425522397, + 55.948187572141286 + ], + [ + 11.865570507074159, + 55.972723681081824 + ], + [ + 11.917328319325891, + 55.99762602763652 + ], + [ + 12.012461783142959, + 56.03070708000749 + ], + [ + 12.16537519656009, + 56.103705150347466 + ], + [ + 12.245127802469614, + 56.12872958241207 + ], + [ + 12.328461134799328, + 56.12689851252647 + ], + [ + 12.41488691781484, + 56.09902580934033 + ], + [ + 12.486094599831008, + 56.09902580934033 + ], + [ + 12.505381705184858, + 56.0931663911338 + ], + [ + 12.540375199281172, + 56.075384863415465 + ], + [ + 12.570078971643118, + 56.06826406169815 + ], + [ + 12.621918168814245, + 56.04376865715721 + ], + [ + 12.608409053024259, + 56.02802157991647 + ], + [ + 12.567393424800635, + 55.996568099151304 + ], + [ + 12.552744986683214, + 55.979681695774694 + ], + [ + 12.525563994809978, + 55.9398867562898 + ], + [ + 12.512705927504612, + 55.9282901293565 + ], + [ + 12.512705927522013, + 55.92084384052331 + ], + [ + 12.540293818392625, + 55.8982608312804 + ], + [ + 12.554453972538004, + 55.88287995500568 + ], + [ + 12.560557486278357, + 55.869940480980674 + ], + [ + 12.565114780986613, + 55.8511416781805 + ], + [ + 12.576019728613252, + 55.829169027006905 + ], + [ + 12.589691601889854, + 55.809475000975326 + ], + [ + 12.602224156535662, + 55.7973493652674 + ], + [ + 12.59799238827741, + 55.787583765764076 + ], + [ + 
12.595876496640711, + 55.77423735935655 + ], + [ + 12.594737172788962, + 55.75299711642062 + ], + [ + 12.589040562598806, + 55.73574454789345 + ], + [ + 12.587901237409062, + 55.72907135049285 + ], + [ + 12.58920332213372, + 55.7242699345637 + ], + [ + 12.592784046443004, + 55.71914293785914 + ], + [ + 12.602224154625437, + 55.708563540511534 + ], + [ + 12.58122806481676, + 55.693060649616676 + ], + [ + 12.541026238752604, + 55.64887116805303 + ], + [ + 12.503265821872343, + 55.63401928120328 + ], + [ + 12.496755405911227, + 55.621771560576434 + ], + [ + 12.48715254091024, + 55.609849359576096 + ], + [ + 12.465017123708822, + 55.60553620669113 + ], + [ + 12.448578321394276, + 55.60887279272605 + ], + [ + 12.436778190301723, + 55.61440663639387 + ], + [ + 12.423675978511822, + 55.61884187831393 + ], + [ + 12.403575063999211, + 55.61920805602437 + ], + [ + 12.359205489553343, + 55.60459309122142 + ] + ] + ], + [ + [ + [ + 15.151377799699626, + 55.13320546863723 + ], + [ + 15.146657748589611, + 55.12824128263833 + ], + [ + 15.144541863021463, + 55.12482330916832 + ], + [ + 15.142344597514276, + 55.12213776604713 + ], + [ + 15.137705925117812, + 55.119533596927006 + ], + [ + 15.145030143887588, + 55.10488515711437 + ], + [ + 15.14771569065473, + 55.09560781228082 + ], + [ + 15.144704623448108, + 55.08710358652932 + ], + [ + 15.134287956713033, + 55.074855858736655 + ], + [ + 15.112803581866446, + 55.05463287894425 + ], + [ + 15.10661868591341, + 55.0446638033151 + ], + [ + 15.110362174967346, + 55.0363630227412 + ], + [ + 15.110362174882985, + 55.03021881607416 + ], + [ + 15.113242231028034, + 55.01829487422116 + ], + [ + 15.097353256984684, + 55.007822962878265 + ], + [ + 15.08668593561237, + 54.99921337492748 + ], + [ + 15.072666222229683, + 54.991637976082735 + ], + [ + 15.023663845453589, + 54.999339321486666 + ], + [ + 14.98286981900447, + 55.001013408035035 + ], + [ + 14.927265174002398, + 55.013691882018705 + ], + [ + 14.858606616102898, + 55.038226353826566 + 
], + [ + 14.780832726085134, + 55.051128327687174 + ], + [ + 14.699554883663037, + 55.089178775265076 + ], + [ + 14.684172686869642, + 55.10134680694097 + ], + [ + 14.697044746375662, + 55.122653717045786 + ], + [ + 14.701426629604299, + 55.16469961589287 + ], + [ + 14.705739778939227, + 55.22565337842627 + ], + [ + 14.70923912897663, + 55.235825913810565 + ], + [ + 14.717539910126169, + 55.246771553017695 + ], + [ + 14.73707116072436, + 55.2666690178351 + ], + [ + 14.745127800978143, + 55.281561598872194 + ], + [ + 14.752126497751574, + 55.298773503001954 + ], + [ + 14.760915559844317, + 55.30927154271457 + ], + [ + 14.774587434970488, + 55.303900449734755 + ], + [ + 14.814952019097953, + 55.27041250278426 + ], + [ + 14.828949414029816, + 55.26170481223073 + ], + [ + 14.85142220523577, + 55.24819986088343 + ], + [ + 14.889008008366588, + 55.233221741944156 + ], + [ + 14.926799098885601, + 55.21579872612391 + ], + [ + 14.954354432958475, + 55.219254408720374 + ], + [ + 14.973399284455681, + 55.21613189867759 + ], + [ + 14.989756707522488, + 55.196030996145524 + ], + [ + 15.000498894293779, + 55.18842194432937 + ], + [ + 15.012868686289377, + 55.18382396229323 + ], + [ + 15.08454287843215, + 55.15457949151666 + ], + [ + 15.131602409617736, + 55.14545318899037 + ], + [ + 15.151377799699626, + 55.13320546863723 + ] + ] + ], + [ + [ + [ + 12.615733271804235, + 55.688137137099474 + ], + [ + 12.673838738720733, + 55.60553620669113 + ], + [ + 12.674571158594317, + 55.60126372594711 + ], + [ + 12.639414911684735, + 55.57827384569545 + ], + [ + 12.624196810743124, + 55.5756289706033 + ], + [ + 12.605723503426296, + 55.56928131163867 + ], + [ + 12.588877797997286, + 55.561346728257355 + ], + [ + 12.577972850907424, + 55.55402251776397 + ], + [ + 12.556325715120503, + 55.55048247136016 + ], + [ + 12.534841340304386, + 55.56932199106652 + ], + [ + 12.520518426804275, + 55.596747153960536 + ], + [ + 12.520192902980352, + 55.61920805602437 + ], + [ + 12.531016472324815, + 
55.63190339004734 + ], + [ + 12.543955926921893, + 55.64036694701188 + ], + [ + 12.556814000024898, + 55.64695874495491 + ], + [ + 12.567393425319057, + 55.65399811698287 + ], + [ + 12.578135615085353, + 55.66644930547757 + ], + [ + 12.595062697527752, + 55.69135164424099 + ], + [ + 12.608409049199672, + 55.70111724754837 + ], + [ + 12.612559440804974, + 55.69513580718399 + ], + [ + 12.614105666025866, + 55.69245027154784 + ], + [ + 12.615733271804235, + 55.688137137099474 + ] + ] + ] + ] + }, + "type": "Feature", + "properties": { + "source": "https://simplemaps.com", + "id": "DK84", + "name": "Hovedstaden" + }, + "id": 5 + } + ] +} \ No newline at end of file diff --git a/src/ensemble_rl_example.py b/src/ensemble_rl_example.py deleted file mode 100644 index 8f13326f67dc2f763a40417905885db61b79c95f..0000000000000000000000000000000000000000 --- a/src/ensemble_rl_example.py +++ /dev/null @@ -1,346 +0,0 @@ -from collections import OrderedDict, deque, namedtuple -from typing import Iterator, List, Tuple -import gymnasium as gym -import lightning.pytorch as pl -import numpy as np -import torch -from lightning.pytorch.loggers import CSVLogger -from torch import Tensor, nn -from torch.optim import Adam, Optimizer -from torch.utils.data import DataLoader -from torch.utils.data.dataset import IterableDataset - -use_cuda = torch.cuda.is_available() - -if use_cuda: - accelerator = 'gpu' - torch.set_float32_matmul_precision('high') -else: - accelerator = 'cpu' - - -class DQN(nn.Module): - """Simple MLP network.""" - - def __init__(self, obs_size: int, n_actions: int, hidden_size: int = 128): - """ - Args: - obs_size: observation/state size of the environment - n_actions: number of discrete actions available in the environment - hidden_size: size of hidden layers - """ - super().__init__() - self.net = nn.Sequential( - nn.Linear(obs_size, hidden_size), - nn.ReLU(), - nn.Linear(hidden_size, n_actions), - ) - - def forward(self, x): - return self.net(x.float()) - -# Named tuple 
for storing experience steps gathered in training -Experience = namedtuple( - "Experience", - field_names=["state", "action", "reward", "done", "new_state"], -) - -class ReplayBuffer: - """Replay Buffer for storing past experiences allowing the agent to learn from them. - - Args: - capacity: size of the buffer - """ - - def __init__(self, capacity: int) -> None: - self.buffer = deque(maxlen=capacity) - - def __len__(self) -> int: - return len(self.buffer) - - def append(self, experience: Experience) -> None: - """Add experience to the buffer. - - Args: - experience: tuple (state, action, reward, done, new_state) - """ - self.buffer.append(experience) - - def sample(self, batch_size: int) -> Tuple: - indices = np.random.choice(len(self.buffer), batch_size, replace=False) - states, actions, rewards, dones, next_states = zip(*(self.buffer[idx] for idx in indices)) - - return ( - np.array(states), - np.array(actions), - np.array(rewards, dtype=np.float32), - np.array(dones, dtype=bool), - np.array(next_states), - ) - -class RLDataset(IterableDataset): - """Iterable Dataset containing the ExperienceBuffer which will be updated with new experiences during training. 
- - Args: - buffer: replay buffer - sample_size: number of experiences to sample at a time - """ - - def __init__(self, buffer: ReplayBuffer, sample_size: int = 200) -> None: - self.buffer = buffer - self.sample_size = sample_size - - def __iter__(self) -> Iterator[Tuple]: - states, actions, rewards, dones, new_states = self.buffer.sample(self.sample_size) - for i in range(len(dones)): - yield states[i], actions[i], rewards[i], dones[i], new_states[i] - -class Agent: - """Base Agent class handeling the interaction with the environment.""" - - def __init__(self, env: gym.Env, replay_buffer: ReplayBuffer) -> None: - """ - Args: - env: training environment - replay_buffer: replay buffer storing experiences - """ - self.env = env - self.replay_buffer = replay_buffer - self.reset() - self.state = self.env.reset() - - def reset(self) -> None: - """Resents the environment and updates the state.""" - self.state, _ = self.env.reset() - - def get_action(self, net: nn.Module, epsilon: float, device: str) -> int: - """Using the given network, decide what action to carry out using an epsilon-greedy policy. - - Args: - net: DQN network - epsilon: value to determine likelihood of taking a random action - device: current device - - Returns: - action - """ - if np.random.random() < epsilon: - action = self.env.action_space.sample() - else: - state = torch.tensor([self.state]) - - if device not in ["cpu"]: - state = state.cuda(device) - - q_values = net(state) - _, action = torch.max(q_values, dim=1) - action = int(action.item()) - - return action - - @torch.no_grad() - def play_step( - self, - net: nn.Module, - epsilon: float = 0.0, - device: str = "cpu", - ) -> Tuple[float, bool]: - """Carries out a single interaction step between the agent and the environment. 
- - Args: - net: DQN network - epsilon: value to determine likelihood of taking a random action - device: current device - - Returns: - reward, done - """ - - action = self.get_action(net, epsilon, device) - - # do step in the environment - #new_state, reward, done, _ = self.env.step(action) - new_state, reward, done, truncated, info = self.env.step(action) - - exp = Experience(self.state, action, reward, done, new_state) - - self.replay_buffer.append(exp) - - self.state = new_state - if done: - self.reset() - return reward, done - -class DQNLightning(pl.LightningModule): - """Basic DQN Model.""" - - def __init__( - self, - batch_size: int = 16, - lr: float = 1e-2, - env: str = "CartPole-v1", - gamma: float = 0.99, - sync_rate: int = 10, - replay_size: int = 1000, - warm_start_size: int = 1000, - eps_last_frame: int = 1000, - eps_start: float = 1.0, - eps_end: float = 0.01, - episode_length: int = 200, - warm_start_steps: int = 1000, - ) -> None: - """ - Args: - batch_size: size of the batches") - lr: learning rate - env: gym environment tag - gamma: discount factor - sync_rate: how many frames do we update the target network - replay_size: capacity of the replay buffer - warm_start_size: how many samples do we use to fill our buffer at the start of training - eps_last_frame: what frame should epsilon stop decaying - eps_start: starting value of epsilon - eps_end: final value of epsilon - episode_length: max length of an episode - warm_start_steps: max episode reward in the environment - """ - super().__init__() - self.save_hyperparameters() - - self.env = gym.make(self.hparams.env) - obs_size = self.env.observation_space.shape[0] - n_actions = self.env.action_space.n - - self.net = DQN(obs_size, n_actions) - self.target_net = DQN(obs_size, n_actions) - - self.buffer = ReplayBuffer(self.hparams.replay_size) - self.agent = Agent(self.env, self.buffer) - self.total_reward = 0 - self.episode_reward = 0 - self.populate(self.hparams.warm_start_steps) - - def 
populate(self, steps: int = 1000) -> None: - """Carries out several random steps through the environment to initially fill up the replay buffer with - experiences. - - Args: - steps: number of random steps to populate the buffer with - """ - for _ in range(steps): - self.agent.play_step(self.net, epsilon=1.0) - - def forward(self, x: Tensor) -> Tensor: - """Passes in a state x through the network and gets the q_values of each action as an output. - - Args: - x: environment state - - Returns: - q values - """ - output = self.net(x) - return output - - def dqn_mse_loss(self, batch: Tuple[Tensor, Tensor]) -> Tensor: - """Calculates the mse loss using a mini batch from the replay buffer. - - Args: - batch: current mini batch of replay data - - Returns: - loss - """ - states, actions, rewards, dones, next_states = batch - - state_action_values = self.net(states).gather(1, actions.long().unsqueeze(-1)).squeeze(-1) - - with torch.no_grad(): - next_state_values = self.target_net(next_states).max(1)[0] - next_state_values[dones] = 0.0 - next_state_values = next_state_values.detach() - - expected_state_action_values = next_state_values * self.hparams.gamma + rewards - - return nn.MSELoss()(state_action_values, expected_state_action_values) - - def get_epsilon(self, start: int, end: int, frames: int) -> float: - if self.global_step > frames: - return end - return start - (self.global_step / frames) * (start - end) - - def training_step(self, batch: Tuple[Tensor, Tensor], nb_batch) -> OrderedDict: - """Carries out a single step through the environment to update the replay buffer. Then calculates loss - based on the minibatch recieved. 
- - Args: - batch: current mini batch of replay data - nb_batch: batch number - - Returns: - Training loss and log metrics - """ - device = self.get_device(batch) - epsilon = self.get_epsilon(self.hparams.eps_start, self.hparams.eps_end, self.hparams.eps_last_frame) - self.log("epsilon", epsilon) - - # step through environment with agent - reward, done = self.agent.play_step(self.net, epsilon, device) - self.episode_reward += reward - self.log("episode reward", self.episode_reward) - - # calculates training loss - loss = self.dqn_mse_loss(batch) - - if done: - self.total_reward = self.episode_reward - self.episode_reward = 0 - - # Soft update of target network - if self.global_step % self.hparams.sync_rate == 0: - self.target_net.load_state_dict(self.net.state_dict()) - - self.log_dict( - { - "reward": reward, - "train_loss": loss, - } - ) - self.log("total_reward", self.total_reward, prog_bar=True) - self.log("steps", self.global_step, logger=False, prog_bar=True) - - return loss - - def configure_optimizers(self) -> List[Optimizer]: - """Initialize Adam optimizer.""" - optimizer = Adam(self.net.parameters(), lr=self.hparams.lr) - return optimizer - - def __dataloader(self) -> DataLoader: - """Initialize the Replay Buffer dataset used for retrieving experiences.""" - dataset = RLDataset(self.buffer, self.hparams.episode_length) - dataloader = DataLoader( - dataset=dataset, - batch_size=self.hparams.batch_size, - ) - return dataloader - - def train_dataloader(self) -> DataLoader: - """Get train loader.""" - return self.__dataloader() - - def get_device(self, batch) -> str: - """Retrieve device currently being used by minibatch.""" - return batch[0].device.index if self.on_gpu else "cpu" - -model = DQNLightning() - -trainer = pl.Trainer( - accelerator="auto", - devices=1 if torch.cuda.is_available() else None, # limiting got iPython runs - max_epochs=150, - val_check_interval=50, - logger=CSVLogger(save_dir="logs/"), -) - -trainer.fit(model) \ No newline at end of 
file diff --git a/src/icaart_ensemble_hyper.py b/src/icaart_ensemble_hyper.py deleted file mode 100644 index 1d99c9a2559b6408c82dd5f6fd1721c805d9bbd4..0000000000000000000000000000000000000000 --- a/src/icaart_ensemble_hyper.py +++ /dev/null @@ -1,236 +0,0 @@ -import argparse -import logging -import pickle -import sys -from pathlib import Path - -import lightning.pytorch as pl -import optuna -import torch -import yaml -from lightning.pytorch.callbacks import EarlyStopping, ModelCheckpoint -from lightning.pytorch.loggers import TensorBoardLogger - -import utils.helpers as hp -import utils.utility as ut -from callbacks import OptunaPruningCallback, WaVoCallback -from data_tools.data_module import WaVoDataModule -from ensemble_models import WaVoLightningEnsemble, WaVoLightningAttentionEnsemble - -use_cuda = torch.cuda.is_available() -if use_cuda: - accelerator = 'cuda' - torch.set_float32_matmul_precision('high') - - max_free = 0 - best_device = None - for j in range(torch.cuda.device_count()): - free , _ = torch.cuda.mem_get_info(j) - if free > max_free: - max_free = free - best_device = j - devices = [best_device] - -else: - accelerator = 'cpu' - devices = 'auto' - -storage_base = 'sqlite:///../../../../data-project/KIWaVo/models/optuna/' - -if ut.debugger_is_active(): - max_epochs = 2 - default_storage_name = 'sqlite:///../../../data-project/KIWaVo/models/optuna/icaart_ensemble_debug_01.db' - -else: - max_epochs = 2000 - default_storage_name = 'sqlite:///../../../../data-project/KIWaVo/models/optuna/icaart_ensemble_01.db' - - -class Objective: - """ - This class defines the objective function for hyperparameter tuning using Optuna library. 
- - Args: - filename (str|Path): Path to .csv file, first column should be a timeindex - model_dir (str|Path): Path to the directory containing the base models - log_dir (str|Path): Path to the logging directory - select_strat (str): How to select the base models (random or first n) - model_count (int): How many base models to use - monitor (str, optional): metric to monitor. Defaults to 'hp/val_loss'. - """ - def __init__(self, filename,modeldir,logdir,select_strat,model_count,monitor= 'hp/val_loss', **kwargs): - # Hold these implementation specific arguments as the fields of the class. - self.filename = filename - self.model_dir = modeldir - self.log_dir = logdir - self.select_strat = select_strat - self.model_count = model_count - self.monitor = monitor - - def _get_callbacks(self, trial): - checkpoint_callback = ModelCheckpoint(save_top_k=1, monitor=self.monitor,save_weights_only=True) - pruning_callback = OptunaPruningCallback(trial, monitor=self.monitor) - early_stop_callback = EarlyStopping(monitor=self.monitor, mode="min", patience=3) - my_callback = WaVoCallback(ensemble=True) - - return my_callback, [checkpoint_callback,pruning_callback, early_stop_callback, my_callback] - - def _get_model_params(self,trial): - model_params = dict( - hidden_size = trial.suggest_int("hidden_size", 32, 512), - num_layers = trial.suggest_int("n_layers", 2, 4), - dropout = 0.25, - learning_rate = trial.suggest_float("lr", 0.00001, 0.01), - norm_func = trial.suggest_categorical("norm_func", ['softmax','minmax']), - ) - - return model_params - - def __call__(self, trial): - - model_params = self._get_model_params(trial) - #log_dir = '../../../data-project/KIWaVo/models/ensemble_debug/' - #model_dir = Path('../../../data-project/KIWaVo/models/icaarts_hollingstedt/lightning_logs/') - model_list = [] - model_path_list = [] - yaml_data = None - - if self.select_strat == 'random': - all_models = [x.name.split('_')[1] for x in self.model_dir.iterdir()] - - elif self.select_strat == 
'first': - model_choice = list(range(self.model_count)) - - #for s in [0,1,2]: - for s in model_choice: - temp_dir = self.model_dir / f'version_{s}/' - - model_list.append(hp.load_model_cuda(temp_dir,use_cuda=use_cuda,devices=devices)) - model_path_list.append(str(temp_dir.resolve())) - - if yaml_data is None: - yaml_data = hp.load_settings_model(temp_dir) - #with open(temp_dir / 'hparams.yaml', 'r') as file: - # yaml_data = yaml.load(file, Loader=yaml.FullLoader) - # yaml_data['scaler'] = pickle.loads(yaml_data['scaler']) - - config = { - 'scaler' : yaml_data['scaler'], #TODO test how this works without giving scaler etc. outside of jupyterlab - #'filename' : yaml_data['filename'], - 'filename' : str(self.filename), - 'level_name_org' : yaml_data['level_name_org'], - 'out_size' : yaml_data['out_size'], - 'threshold' : yaml_data['threshold'], - 'feature_count' : yaml_data['feature_count'], - 'differencing' : yaml_data['differencing'], - 'model_architecture' : 'ensemble'} - - print("ACHTUNG GGF. FALSCHES MODELL") - #ensemble_model = WaVoLightningEnsemble(model_list,model_path_list,**model_params)#TODO 'ÄNDERN! 
- ensemble_model = WaVoLightningAttentionEnsemble(model_list,model_path_list,**model_params) - - config['in_size'] = ensemble_model.max_in_size - - data_module = WaVoDataModule(**config) - - - - logging.info("Params: %s", trial.params) - - - my_callback, callbacks = self._get_callbacks(trial) - logger = TensorBoardLogger(self.log_dir, default_hp_metric=False) - - - trainer = pl.Trainer(default_root_dir=self.log_dir, - gradient_clip_val=0.5, - logger=logger, - accelerator=accelerator, - devices=devices, - callbacks=callbacks, - max_epochs=max_epochs, - log_every_n_steps=10, - ) - trainer.fit(ensemble_model, data_module) - - #save metrics to optuna - model_path = str(Path(trainer.log_dir).resolve()) - logging.info("model_path: %s", model_path) - trial.set_user_attr("model_path", model_path) - for metric in ['hp/val_nse', 'hp/val_mae', 'hp/val_mae_flood']: - for i in [23, 47]: - trial.set_user_attr(f'{metric}_{i}', my_callback.metrics[metric][i].item()) - - return my_callback.metrics[self.monitor].item() - - - -def parse_args() -> argparse.Namespace: - """ Parse all the arguments and provides some help in the command line - """ - - parser: argparse.ArgumentParser = argparse.ArgumentParser(description='Execute experiments for exp_icaart.') - parser.add_argument('filename', metavar='datafile', type=Path,help='The path to your input data.') - parser.add_argument('modeldir', metavar='modeldir', type=Path,help='The path to your base models.') - parser.add_argument('logdir', type=Path,help='set a directory for logs and model checkpoints.') - parser.add_argument('trials', metavar='trials',type=int, default=100,help='How many trials to run.') - parser.add_argument('select_strat', choices=['random','first']) - parser.add_argument('model_count', metavar='mc',type=int, default=5,help='How many base models to use.') - - parser.add_argument('--expname', metavar='experiment_name',type=str, default='nameless',help='The name of the experiment.') - parser.add_argument('--storagename', 
metavar='storage_name',type=str, default=None,help='The database for the experiment.') - - return parser.parse_args() - - - - -def main(): - parsed_args = parse_args() - if not parsed_args.logdir.exists(): - parsed_args.logdir.mkdir(parents=True) - - if False: - pruner = optuna.pruners.HyperbandPruner( - min_resource=1, max_resource='auto', reduction_factor=3, bootstrap_count=0) - else: - pruner = optuna.pruners.NopPruner() - - - - study_name = f"{parsed_args.filename.stem} {parsed_args.expname}" # Unique identifier of the study. - storage_name = default_storage_name if parsed_args.storagename is None else f"{storage_base}{parsed_args.storagename}.db" - - # Logging, add stream handler of stdout to show the messages - logging.basicConfig(level=logging.INFO) - logFormatter = logging.Formatter('%(asctime)s;%(levelname)s;%(message)s',datefmt='%Y-%m-%d %H:%M:%S') - fileHandler = logging.FileHandler(parsed_args.logdir / 'ensemble_hyper.log',) - consoleHandler = logging.StreamHandler(sys.stdout) - fileHandler.setFormatter(logFormatter) - consoleHandler.setFormatter(logFormatter) - logging.getLogger().addHandler(fileHandler) - #logging.getLogger().addHandler(consoleHandler) - optuna.logging.get_logger("optuna").addHandler(fileHandler) - optuna.logging.get_logger("optuna").addHandler(consoleHandler) - - logging.info('Start of this execution======================================================================') - logging.info("Executing %s with device %s and parameters %s ",sys.argv[0],devices,sys.argv[1:]) - - - study = optuna.create_study( - study_name=study_name, - storage=storage_name, - direction="minimize", - pruner=pruner, - load_if_exists=True) - - study.set_metric_names(["hp/val_loss"]) - objective = Objective(**vars(parsed_args),gc_after_trial=True) - - - study.optimize(objective,n_trials=parsed_args.trials,timeout=None,callbacks=[lambda study, trial: torch.cuda.empty_cache()]) - - -if __name__ == "__main__": - #TODO command line arguments - main() diff --git 
a/src/icaart_random_base.py b/src/icaart_random_base.py deleted file mode 100644 index 8a6b3f5dea646ae16bd1edab3e937af8418b4e48..0000000000000000000000000000000000000000 --- a/src/icaart_random_base.py +++ /dev/null @@ -1,233 +0,0 @@ -import argparse -import logging -import pickle -import sys -from pathlib import Path -import random -import lightning.pytorch as pl -import optuna -import torch -import yaml -from lightning.pytorch.callbacks import EarlyStopping, ModelCheckpoint -from lightning.pytorch.loggers import TensorBoardLogger - -import utils.helpers as hp -import utils.utility as ut -from callbacks import OptunaPruningCallback, WaVoCallback -from data_tools.data_module import WaVoDataModule -from ensemble_models import WaVoLightningEnsemble - -use_cuda = torch.cuda.is_available() -if use_cuda: - accelerator = 'cuda' - torch.set_float32_matmul_precision('high') - - max_free = 0 - best_device = None - for j in range(torch.cuda.device_count()): - free , _ = torch.cuda.mem_get_info(j) - if free > max_free: - max_free = free - best_device = j - devices = [best_device] - -else: - accelerator = 'cpu' - devices = 'auto' - -storage_base = 'sqlite:///../../../../data-project/KIWaVo/models/optuna/' - -if ut.debugger_is_active(): - max_epochs = 2 - default_storage_name = 'sqlite:///../../../data-project/KIWaVo/models/optuna/icaart_ensemble_debug_01.db' - -else: - max_epochs = 100 - default_storage_name = 'sqlite:///../../../../data-project/KIWaVo/models/optuna/icaart_ensemble_01.db' - - -class Objective: - """ - This class defines the objective function for hyperparameter tuning using Optuna library. - - Args: - filename (str|Path): Path to .csv file, first column should be a timeindex - model_dir (str|Path): Path to the directory containing the base models - log_dir (str|Path): Path to the logging directory - model_count (int): How many base models to use - monitor (str, optional): metric to monitor. Defaults to 'hp/val_loss'. 
- """ - def __init__(self, filename,modeldir,logdir,model_count,monitor= 'hp/val_loss', **kwargs): - # Hold these implementation specific arguments as the fields of the class. - self.filename = filename - self.model_dir = modeldir - self.log_dir = logdir - self.model_count = model_count - self.monitor = monitor - self.all_models = [int(x.name.split('_')[1]) for x in self.model_dir.iterdir()] - - - def _get_callbacks(self, trial): - checkpoint_callback = ModelCheckpoint(save_top_k=1, monitor=self.monitor,save_weights_only=True) - pruning_callback = OptunaPruningCallback(trial, monitor=self.monitor) - early_stop_callback = EarlyStopping(monitor=self.monitor, mode="min", patience=3) - my_callback = WaVoCallback(ensemble=True) - - return my_callback, [checkpoint_callback,pruning_callback, early_stop_callback, my_callback] - - def _get_model_params(self,trial): - #This is just so that optuna will actually execute the number of trials we want, it stops when it testes all parameter combinations - place_holder = trial.suggest_int("place_holder", 0, 10000) - - model_params = dict( - hidden_size = trial.suggest_int("hidden_size", 512, 512), - num_layers = trial.suggest_int("n_layers", 2, 2), - dropout = 0.25, - learning_rate = trial.suggest_float("lr", 0.002, 0.002), - norm_func = trial.suggest_categorical("norm_func", ['minmax']), - ) - - return model_params - - def __call__(self, trial): - - model_params = self._get_model_params(trial) - #log_dir = '../../../data-project/KIWaVo/models/ensemble_debug/' - #model_dir = Path('../../../data-project/KIWaVo/models/icaarts_hollingstedt/lightning_logs/') - model_list = [] - model_path_list = [] - yaml_data = None - - - - model_choice = random.sample(self.all_models, self.model_count) - - #for s in [0,1,2]: - for s in model_choice: - temp_dir = self.model_dir / f'version_{s}/' - - model_list.append(hp.load_model_cuda(temp_dir,use_cuda=use_cuda,devices=devices)) - model_path_list.append(str(temp_dir.resolve())) - - if yaml_data is 
None: - yaml_data = hp.load_settings_model(temp_dir) - #with open(temp_dir / 'hparams.yaml', 'r') as file: - # yaml_data = yaml.load(file, Loader=yaml.FullLoader) - # yaml_data['scaler'] = pickle.loads(yaml_data['scaler']) - - config = { - 'scaler' : yaml_data['scaler'], #TODO test how this works without giving scaler etc. outside of jupyterlab - #'filename' : yaml_data['filename'], - 'filename' : str(self.filename), - 'level_name_org' : yaml_data['level_name_org'], - 'out_size' : yaml_data['out_size'], - 'threshold' : yaml_data['threshold'], - 'feature_count' : yaml_data['feature_count'], - 'differencing' : yaml_data['differencing'], - 'model_architecture' : 'ensemble'} - - ensemble_model = WaVoLightningEnsemble(model_list,model_path_list,**model_params) - config['in_size'] = ensemble_model.max_in_size - - data_module = WaVoDataModule(**config) - - - - logging.info("Params: %s", trial.params) - - - my_callback, callbacks = self._get_callbacks(trial) - logger = TensorBoardLogger(self.log_dir, default_hp_metric=False) - - - trainer = pl.Trainer(default_root_dir=self.log_dir, - gradient_clip_val=0.5, - logger=logger, - accelerator=accelerator, - devices=devices, - callbacks=callbacks, - max_epochs=max_epochs, - log_every_n_steps=10, - ) - trainer.fit(ensemble_model, data_module) - - #save metrics to optuna - model_path = str(Path(trainer.log_dir).resolve()) - logging.info("model_path: %s", model_path) - trial.set_user_attr("model_path", model_path) - for metric in ['hp/val_nse', 'hp/val_mae', 'hp/val_mae_flood']: - for i in [23, 47]: - trial.set_user_attr(f'{metric}_{i}', my_callback.metrics[metric][i].item()) - - return my_callback.metrics[self.monitor].item() - - - -def parse_args() -> argparse.Namespace: - """ Parse all the arguments and provides some help in the command line - """ - - parser: argparse.ArgumentParser = argparse.ArgumentParser(description='Execute experiments for exp_icaart.') - parser.add_argument('filename', metavar='datafile', 
type=Path,help='The path to your input data.') - parser.add_argument('modeldir', metavar='modeldir', type=Path,help='The path to your base models.') - parser.add_argument('logdir', type=Path,help='set a directory for logs and model checkpoints.') - parser.add_argument('trials', metavar='trials',type=int, default=100,help='How many trials to run.') - parser.add_argument('model_count', metavar='mc',type=int, default=5,help='How many base models to use.') - - parser.add_argument('--expname', metavar='experiment_name',type=str, default='nameless',help='The name of the experiment.') - parser.add_argument('--storagename', metavar='storage_name',type=str, default=None,help='The database for the experiment.') - - return parser.parse_args() - - - - -def main(): - parsed_args = parse_args() - if not parsed_args.logdir.exists(): - parsed_args.logdir.mkdir(parents=True) - - if False: - pruner = optuna.pruners.HyperbandPruner( - min_resource=1, max_resource='auto', reduction_factor=3, bootstrap_count=0) - else: - pruner = optuna.pruners.NopPruner() - - - - study_name = f"{parsed_args.filename.stem} {parsed_args.expname}" # Unique identifier of the study. 
- storage_name = default_storage_name if parsed_args.storagename is None else f"{storage_base}{parsed_args.storagename}.db" - - # Logging, add stream handler of stdout to show the messages - logging.basicConfig(level=logging.INFO) - logFormatter = logging.Formatter('%(asctime)s;%(levelname)s;%(message)s',datefmt='%Y-%m-%d %H:%M:%S') - fileHandler = logging.FileHandler(parsed_args.logdir / 'ensemble_hyper.log',) - consoleHandler = logging.StreamHandler(sys.stdout) - fileHandler.setFormatter(logFormatter) - consoleHandler.setFormatter(logFormatter) - logging.getLogger().addHandler(fileHandler) - #logging.getLogger().addHandler(consoleHandler) - optuna.logging.get_logger("optuna").addHandler(fileHandler) - optuna.logging.get_logger("optuna").addHandler(consoleHandler) - - logging.info('Start of this execution======================================================================') - logging.info("Executing %s with device %s and parameters %s ",sys.argv[0],devices,sys.argv[1:]) - - - study = optuna.create_study( - study_name=study_name, - storage=storage_name, - direction="minimize", - pruner=pruner, - load_if_exists=True) - - study.set_metric_names(["hp/val_loss"]) - objective = Objective(**vars(parsed_args),gc_after_trial=True) - - - study.optimize(objective,n_trials=parsed_args.trials,timeout=None,callbacks=[lambda study, trial: torch.cuda.empty_cache()]) - - -if __name__ == "__main__": - #TODO command line arguments - main() diff --git a/src/icaart_sizes.py b/src/icaart_sizes.py deleted file mode 100644 index 532255d01dacbdb16abea421c4eca05a482fe6e0..0000000000000000000000000000000000000000 --- a/src/icaart_sizes.py +++ /dev/null @@ -1,237 +0,0 @@ -import argparse -import logging -import pickle -import sys -from pathlib import Path -import random -import lightning.pytorch as pl -import optuna -import torch -import yaml -from lightning.pytorch.callbacks import EarlyStopping, ModelCheckpoint -from lightning.pytorch.loggers import TensorBoardLogger - -import 
utils.helpers as hp -import utils.utility as ut -from callbacks import OptunaPruningCallback, WaVoCallback -from data_tools.data_module import WaVoDataModule -from ensemble_models import WaVoLightningEnsemble - -use_cuda = torch.cuda.is_available() -if use_cuda: - accelerator = 'cuda' - torch.set_float32_matmul_precision('high') - - max_free = 0 - best_device = None - for j in range(torch.cuda.device_count()): - free , _ = torch.cuda.mem_get_info(j) - if free > max_free: - max_free = free - best_device = j - devices = [best_device] - -else: - accelerator = 'cpu' - devices = 'auto' - -storage_base = 'sqlite:///../../../../data-project/KIWaVo/models/optuna/' - -if ut.debugger_is_active(): - max_epochs = 2 - default_storage_name = 'sqlite:///../../../data-project/KIWaVo/models/optuna/icaart_ensemble_debug_01.db' - -else: - max_epochs = 100 - default_storage_name = 'sqlite:///../../../../data-project/KIWaVo/models/optuna/icaart_ensemble_01.db' - - -class Objective: - """ - This class defines the objective function for hyperparameter tuning using Optuna library. - - Args: - filename (str|Path): Path to .csv file, first column should be a timeindex - model_dir (str|Path): Path to the directory containing the base models - log_dir (str|Path): Path to the logging directory - monitor (str, optional): metric to monitor. Defaults to 'hp/val_loss'. - """ - def __init__(self, filename,modeldir,logdir,monitor= 'hp/val_loss', **kwargs): - # Hold these implementation specific arguments as the fields of the class. 
- self.filename = filename - self.model_dir = modeldir - self.log_dir = logdir - self.monitor = monitor - self.all_models = [int(x.name.split('_')[1]) for x in self.model_dir.iterdir()] - - - def _get_callbacks(self, trial): - checkpoint_callback = ModelCheckpoint(save_top_k=1, monitor=self.monitor,save_weights_only=True) - pruning_callback = OptunaPruningCallback(trial, monitor=self.monitor) - early_stop_callback = EarlyStopping(monitor=self.monitor, mode="min", patience=3) - my_callback = WaVoCallback(ensemble=True) - - return my_callback, [checkpoint_callback,pruning_callback, early_stop_callback, my_callback] - - def _get_model_params(self,trial): - #This is just so that optuna will actually execute the number of trials we want, it stops when it testes all parameter combinations - ensemble_size = trial.suggest_categorical("ensemble_size",[5,10,15,20]) - #temperature = trial.suggest_categorical("temperature",[1/10,1,10]) - - model_params = dict( - hidden_size = trial.suggest_int("hidden_size", 512, 512), - num_layers = trial.suggest_int("n_layers", 2, 2), - dropout = 0.25, - learning_rate = trial.suggest_float("lr", 0.002, 0.002), - norm_func = trial.suggest_categorical("norm_func", ['softmax']), - ) - - return model_params, ensemble_size - - def __call__(self, trial): - - model_params,ensemble_size = self._get_model_params(trial) - #log_dir = '../../../data-project/KIWaVo/models/ensemble_debug/' - #model_dir = Path('../../../data-project/KIWaVo/models/icaarts_hollingstedt/lightning_logs/') - model_list = [] - model_path_list = [] - yaml_data = None - - - - model_choice = random.sample(self.all_models, ensemble_size) - - #for s in [0,1,2]: - for s in model_choice: - temp_dir = self.model_dir / f'version_{s}/' - - model_list.append(hp.load_model_cuda(temp_dir,use_cuda=use_cuda,devices=devices)) - model_path_list.append(str(temp_dir.resolve())) - - if yaml_data is None: - yaml_data = hp.load_settings_model(temp_dir) - #with open(temp_dir / 'hparams.yaml', 'r') as 
file: - # yaml_data = yaml.load(file, Loader=yaml.FullLoader) - # yaml_data['scaler'] = pickle.loads(yaml_data['scaler']) - - config = { - 'scaler' : yaml_data['scaler'], #TODO test how this works without giving scaler etc. outside of jupyterlab - #'filename' : yaml_data['filename'], - 'filename' : str(self.filename), - 'level_name_org' : yaml_data['level_name_org'], - 'out_size' : yaml_data['out_size'], - 'threshold' : yaml_data['threshold'], - 'feature_count' : yaml_data['feature_count'], - 'differencing' : yaml_data['differencing'], - 'model_architecture' : 'ensemble'} - - ensemble_model = WaVoLightningEnsemble(model_list,model_path_list,**model_params) - config['in_size'] = ensemble_model.max_in_size - - data_module = WaVoDataModule(**config) - - - - logging.info("Params: %s", trial.params) - - - my_callback, callbacks = self._get_callbacks(trial) - logger = TensorBoardLogger(self.log_dir, default_hp_metric=False) - - - trainer = pl.Trainer(default_root_dir=self.log_dir, - gradient_clip_val=0.5, - logger=logger, - accelerator=accelerator, - devices=devices, - callbacks=callbacks, - max_epochs=max_epochs, - log_every_n_steps=10, - ) - trainer.fit(ensemble_model, data_module) - - #save metrics to optuna - model_path = str(Path(trainer.log_dir).resolve()) - logging.info("model_path: %s", model_path) - trial.set_user_attr("model_path", model_path) - for metric in ['hp/val_nse', 'hp/val_mae', 'hp/val_mae_flood']: - for i in [23, 47]: - trial.set_user_attr(f'{metric}_{i}', my_callback.metrics[metric][i].item()) - - return my_callback.metrics[self.monitor].item() - - - -def parse_args() -> argparse.Namespace: - """ Parse all the arguments and provides some help in the command line - """ - - parser: argparse.ArgumentParser = argparse.ArgumentParser(description='Execute experiments for exp_icaart.') - parser.add_argument('filename', metavar='datafile', type=Path,help='The path to your input data.') - parser.add_argument('modeldir', metavar='modeldir', 
type=Path,help='The path to your base models.') - parser.add_argument('logdir', type=Path,help='set a directory for logs and model checkpoints.') - parser.add_argument('trials', metavar='trials',type=int, default=100,help='How many trials to run.') - #parser.add_argument('model_count', metavar='mc',type=int, default=5,help='How many base models to use.') - - parser.add_argument('--expname', metavar='experiment_name',type=str, default='nameless',help='The name of the experiment.') - parser.add_argument('--storagename', metavar='storage_name',type=str, default=None,help='The database for the experiment.') - - return parser.parse_args() - - - - -def main(): - parsed_args = parse_args() - if not parsed_args.logdir.exists(): - parsed_args.logdir.mkdir(parents=True) - - if False: - pruner = optuna.pruners.HyperbandPruner( - min_resource=1, max_resource='auto', reduction_factor=3, bootstrap_count=0) - else: - pruner = optuna.pruners.NopPruner() - - - - study_name = f"{parsed_args.filename.stem} {parsed_args.expname}" # Unique identifier of the study. 
- storage_name = default_storage_name if parsed_args.storagename is None else f"{storage_base}{parsed_args.storagename}.db" - - # Logging, add stream handler of stdout to show the messages - logging.basicConfig(level=logging.INFO) - logFormatter = logging.Formatter('%(asctime)s;%(levelname)s;%(message)s',datefmt='%Y-%m-%d %H:%M:%S') - fileHandler = logging.FileHandler(parsed_args.logdir / 'ensemble_hyper.log',) - consoleHandler = logging.StreamHandler(sys.stdout) - fileHandler.setFormatter(logFormatter) - consoleHandler.setFormatter(logFormatter) - logging.getLogger().addHandler(fileHandler) - #logging.getLogger().addHandler(consoleHandler) - optuna.logging.get_logger("optuna").addHandler(fileHandler) - optuna.logging.get_logger("optuna").addHandler(consoleHandler) - - logging.info('Start of this execution======================================================================') - logging.info("Executing %s with device %s and parameters %s ",sys.argv[0],devices,sys.argv[1:]) - - - ens_sizes =[5,10,15,20] - search_space = {"ensemble_size":ens_sizes*parsed_args.trials } - sampler=optuna.samplers.GridSampler(search_space) - - study = optuna.create_study( - sampler=sampler, - study_name=study_name, - storage=storage_name, - direction="minimize", - pruner=pruner, - load_if_exists=True) - - study.set_metric_names(["hp/val_loss"]) - objective = Objective(**vars(parsed_args),gc_after_trial=True) - - - study.optimize(objective,n_trials=len(ens_sizes)*parsed_args.trials,timeout=None,callbacks=[lambda study, trial: torch.cuda.empty_cache()]) - - -if __name__ == "__main__": - #TODO command line arguments - main() diff --git a/src/main.py b/src/main.py deleted file mode 100644 index 9ba378440bbfbca27b0387bbb557cd9dd65de492..0000000000000000000000000000000000000000 --- a/src/main.py +++ /dev/null @@ -1,90 +0,0 @@ -""" -Main Module to start training -""" -import lightning.pytorch as pl -import torch -from lightning.pytorch.callbacks.early_stopping import EarlyStopping - -from 
lightning.pytorch.loggers import TensorBoardLogger - -import metrics as mt -from callbacks import WaVoCallback -from data_tools import WaVoDataModule -from models import WaVoLightningModule - -# print('set float matmul precision to high') -torch.set_float32_matmul_precision('high') - - -def main(): - """Main Method - """ - - config = { - # 'filename': '../../data/input/Poetrau20.csv', - # 'log_dir': '../../models_torch/pötrau', - # 'level_name_org': 'WPoetrau_pegel_cm', - 'filename': '../../data/input/Treia.csv', - 'log_dir': '../../models_torch/treia', - 'level_name_org': 'Treia_pegel_cm', - 'train': 0.7, - 'val': 0.15, - 'test': 0.15, - 'batch_size': 2048, - 'in_size': 144, - 'out_size': 48, - # 'scale_target':False, - # 'differencing':1, - 'differencing': 0, - 'percentile': 0.95 - } - - found_lr = None - tune_lr = False - - for _ in range(10): - - data_module = WaVoDataModule(**config) - data_module.prepare_data() - data_module.setup(stage='fit') - - # TODO kwargs for neural network? 
- model = WaVoLightningModule( - data_module.mean, - data_module.scale, - feature_count=data_module.feature_count, - in_size=data_module.hparams.in_size, - out_size=data_module.hparams.out_size, - hidden_size_lstm=128, - hidden_size=64, - learning_rate=found_lr or 0.001) # TODO tidy up scaler mess - - # Callbacks & Logging - early_stop_callback = EarlyStopping( - monitor="hp/val_loss", mode="min", patience=3) - my_callback = WaVoCallback() - callbacks = [early_stop_callback, my_callback] - logger = TensorBoardLogger(config['log_dir'], default_hp_metric=False) - - trainer = pl.Trainer(default_root_dir=config['log_dir'], - logger=logger, - # accelerator="auto", - accelerator="gpu", - devices=1, - callbacks=callbacks, - max_epochs=50, - log_every_n_steps=1, - ) - - if found_lr is None and tune_lr: - tuner = pl.tuner.Tuner(trainer) - # tuner.scale_batch_size(model, datamodule=data_module,init_val=512, max_trials=3) - # TODO calling data_module here messes with the hyperparameter logging - lr_finder = tuner.lr_find(model, datamodule=data_module) - found_lr = lr_finder.suggestion() - - trainer.fit(model, data_module) - - -if __name__ == "__main__": - main() diff --git a/src/main_cli.py b/src/main_cli.py deleted file mode 100644 index a2dd91eb1b047906dc3a766a7c8e3f56390431ce..0000000000000000000000000000000000000000 --- a/src/main_cli.py +++ /dev/null @@ -1,66 +0,0 @@ -""" -Main Module to start training -""" -import lightning.pytorch as pl -import torch -from lightning.pytorch.callbacks.early_stopping import EarlyStopping -from lightning.pytorch.cli import LightningCLI - -from lightning.pytorch.loggers import TensorBoardLogger - -import metrics as mt -from callbacks import WaVoCallback -from data_tools import WaVoDataModule -from models import WaVoLightningModule - -# print('set float matmul precision to high') -torch.set_float32_matmul_precision('high') - - -class MyLightningCLI(LightningCLI): - # def __init__(): - # super().__init__() - - def 
add_arguments_to_parser(self, parser): - parser.link_arguments("data.mean", "model.mean", - apply_on="instantiate") - parser.link_arguments("data.scale", "model.scale", - apply_on="instantiate") - parser.link_arguments( - "data.threshold", "model.threshold", apply_on="instantiate") - parser.link_arguments("data.feature_count", - "model.feature_count", apply_on="instantiate") - parser.link_arguments("data.in_size", "model.in_size") - parser.link_arguments("data.out_size", "model.out_size") - - parser.add_lightning_class_args(WaVoCallback, "wavo_callback") - # parser.set_defaults({"wavo_callback.chosen_metrics": ['nse', 'p10']}) - - parser.add_lightning_class_args(EarlyStopping, "early_stopping") - parser.set_defaults({"early_stopping.monitor": "hp/val_loss", - 'early_stopping.mode': 'min', "early_stopping.patience": 2}) - # monitor="hp/val_loss", mode="min" - - -if __name__ == "__main__": - - cli = MyLightningCLI(model_class=WaVoLightningModule, - datamodule_class=WaVoDataModuleCLI, - ) - # run=False - # hidden_size_lstm: 128 - # hidden_size: 64 - # num_layers: 2 - # dropout: 0.2 - # learning_rate: 0.001 - # lightning_model = WaVoLightningModule( - # cli.data_module.mean, - # cli.data_module.scale, - # cli.data_module.threshold, - # feature_count=cli.data_module.feature_count, - # in_size=cli.data_module.hparams.in_size, - # out_size=cli.data_module.hparams.out_size, - # hidden_size_lstm=cli.model.hidden_size_lstm, - # hidden_size=cli.model.hidden_size, - # learning_rate=cli.model.learning_rate) - # cli.trainer.fit(lightning_model, datamodule=cli.datamodule) diff --git a/src/main_hyper.py b/src/main_hyper.py index 50dd34890235179cfef1d5620183aa3fdb6bf47d..03a514d7553324e37fc2ee7ed634a32ee230388e 100644 --- a/src/main_hyper.py +++ b/src/main_hyper.py @@ -1,6 +1,7 @@ """ Main Module to start training """ + import argparse import gc import logging @@ -15,46 +16,49 @@ from lightning.pytorch.loggers import TensorBoardLogger from lightning.pytorch.tuner import Tuner 
import utils.utility as ut -from callbacks import OptunaPruningCallback, WaVoCallback +from utils.callbacks import OptunaPruningCallback, WaVoCallback from data_tools.data_module import WaVoDataModule from models.lightning_module import WaVoLightningModule use_cuda = torch.cuda.is_available() if use_cuda: - accelerator = 'cuda' - #accelerator = 'gpu' - torch.set_float32_matmul_precision('high') + accelerator = "cuda" + # accelerator = 'gpu' + torch.set_float32_matmul_precision("high") else: - accelerator = 'cpu' + accelerator = "cpu" # CONSTANTS -#storage_base = 'sqlite:///../../models_torch/optuna/' -storage_base = 'sqlite:///../../../../data-project/KIWaVo/models/optuna/' +# storage_base = 'sqlite:///../../models_torch/optuna/' +storage_base = "sqlite:///../../../../data-project/KIWaVo/models/optuna/" if ut.debugger_is_active(): - #default_storage_name = 'sqlite:///../models_torch/optuna/debug_01.db' - default_storage_name = 'sqlite:///../../../data-project/KIWaVo/models/optuna/debug_03.db' + # default_storage_name = 'sqlite:///../models_torch/optuna/debug_01.db' + default_storage_name = ( + "sqlite:///../../../data-project/KIWaVo/models/optuna/debug_02.db" + ) n_trials = 2 max_epochs = 2 default_max_trials = 1 else: - #default_storage_name = 'sqlite:///../../models_torch/optuna/optimization_01.db' - default_storage_name = 'sqlite:///../../../../data-project/KIWaVo/models/optuna/optimization_01.db' + # default_storage_name = 'sqlite:///../../models_torch/optuna/optimization_01.db' + default_storage_name = ( + "sqlite:///../../../../data-project/KIWaVo/models/optuna/optimization_01.db" + ) n_trials = 100 max_epochs = 2000 default_max_trials = None if use_cuda: - #default_batch_size = 1024 + # default_batch_size = 1024 default_batch_size = 256 - - #This is for multi GPU training, doesn't work with optuna - #devices = [] - #for j in range(torch.cuda.device_count()): + # This is for multi GPU training, doesn't work with optuna + # devices = [] + # for j in 
range(torch.cuda.device_count()): # free , max_gpu = torch.cuda.mem_get_info(j) # if free > (max_gpu /2): # devices.append(j) @@ -62,15 +66,15 @@ if use_cuda: max_free = 0 best_device = None for j in range(torch.cuda.device_count()): - free , _ = torch.cuda.mem_get_info(j) + free, _ = torch.cuda.mem_get_info(j) if free > max_free: max_free = free best_device = j devices = [best_device] - #torch.cuda.set_per_process_memory_fraction(0.7, best_device) + # torch.cuda.set_per_process_memory_fraction(0.7, best_device) else: - devices = 'auto' + devices = "auto" default_batch_size = 2048 @@ -89,7 +93,21 @@ class Objective: train (float, optional): share of training data (first train %). Defaults to 0.7. val (float, optional): share of validation data (directly after train). Defaults to .15. """ - def __init__(self, filename, gauge, logdir, in_size=144, out_size=48, batch_size=2048, percentile=0.95, train=0.7, val=.15,monitor= 'hp/val_loss', **kwargs): + + def __init__( + self, + filename, + gauge, + logdir, + in_size=144, + out_size=48, + batch_size=2048, + percentile=0.95, + train=0.7, + val=0.15, + monitor="hp/val_loss", + **kwargs, + ): # Hold these implementation specific arguments as the fields of the class. 
self.filename = filename self.level_name_org = gauge @@ -103,19 +121,28 @@ class Objective: self.monitor = monitor def _get_callbacks(self, trial): - checkpoint_callback = ModelCheckpoint(save_top_k=1, monitor=self.monitor,save_weights_only=True) + checkpoint_callback = ModelCheckpoint( + save_top_k=1, monitor=self.monitor, save_weights_only=True + ) pruning_callback = OptunaPruningCallback(trial, monitor=self.monitor) - early_stop_callback = EarlyStopping(monitor=self.monitor, mode="min", patience=3) + early_stop_callback = EarlyStopping( + monitor=self.monitor, mode="min", patience=3 + ) # TODO: update when optuna support the new namespace # pruning_callback = PyTorchLightningPruningCallback(trial, monitor="hp/val_loss") my_callback = WaVoCallback() - return my_callback, [checkpoint_callback,pruning_callback, early_stop_callback, my_callback] + return my_callback, [ + checkpoint_callback, + pruning_callback, + early_stop_callback, + my_callback, + ] def _get_model_params(self,trial : optuna.Trial): #differencing = 0 differencing = trial.suggest_int("differencing", 0, 1) learning_rate = trial.suggest_float("lr", 0.00001, 0.01) - #optimizer = trial.suggest_categorical("optimizer", ["adam","adamw"]) + # optimizer = trial.suggest_categorical("optimizer", ["adam","adamw"]) optimizer = trial.suggest_categorical("optimizer", ["adam"]) embed_time = trial.suggest_categorical("embed_time", [True,False]) #model_architecture = trial.suggest_categorical("model_architecture", ["classic_lstm"]) @@ -131,73 +158,79 @@ class Objective: if model_architecture in ["chained_dense"]: model_params = dict( - activation=trial.suggest_categorical("activation", ["relu","gelu"]), + activation=trial.suggest_categorical("activation", ["relu", "gelu"]), hidden_size=trial.suggest_int("hidden_size", 32, 512), - dropout = trial.suggest_float("dropout", 0, 0.5), + dropout=trial.suggest_float("dropout", 0, 0.5), ) if model_architecture in ["tsmixer"]: model_params = dict( - 
norm_type=trial.suggest_categorical("norm_type", ["L","B"]), - activation=trial.suggest_categorical("activation", ["relu","gelu"]), + norm_type=trial.suggest_categorical("norm_type", ["L", "B"]), + activation=trial.suggest_categorical("activation", ["relu", "gelu"]), n_block=trial.suggest_int("n_block", 1, 8), - dropout = trial.suggest_float("dropout", 0, 0.5), + dropout=trial.suggest_float("dropout", 0, 0.5), ff_dim=trial.suggest_int("ff_dim", 32, 1024), ) - if model_architecture in ["classic_lstm","last_lstm"]: + if model_architecture in ["classic_lstm", "last_lstm"]: model_params = dict( - hidden_size_lstm = trial.suggest_int("hidden_size_lstm", 32, 512), - num_layers_lstm = trial.suggest_int("n_layers_lstm", 1, 3), - #num_layers_lstm = trial.suggest_int("n_layers_lstm", 1, 1), - hidden_size = trial.suggest_int("hidden_size", 32, 512), - num_layers = trial.suggest_int("n_layers", 2, 4), - #num_layers = trial.suggest_int("n_layers", 2, 2), - dropout = trial.suggest_float("dropout", 0, 0.5), - ) + hidden_size_lstm=trial.suggest_int("hidden_size_lstm", 32, 512), + num_layers_lstm=trial.suggest_int("n_layers_lstm", 1, 3), + # num_layers_lstm = trial.suggest_int("n_layers_lstm", 1, 1), + hidden_size=trial.suggest_int("hidden_size", 32, 512), + num_layers=trial.suggest_int("n_layers", 2, 4), + # num_layers = trial.suggest_int("n_layers", 2, 2), + dropout=trial.suggest_float("dropout", 0, 0.5), + ) - if model_architecture in ["transformer","autoformer","informer"]: - n_heads_temp = 8 #trial.suggest_int("n_heads", 8, 32) - d_model_temp = trial.suggest_int("d_model", 2*n_heads_temp, 256,2) #even number only, must be larger than n_heads (factor 2 is just guessing) + if model_architecture in ["transformer", "autoformer", "informer"]: + n_heads_temp = 8 # trial.suggest_int("n_heads", 8, 32) + d_model_temp = trial.suggest_int( + "d_model", 2 * n_heads_temp, 256, 2 + ) # even number only, must be larger than n_heads (factor 2 is just guessing) model_params = dict( - d_model = 
d_model_temp, - n_heads = n_heads_temp, - #output_attention=trial.suggest_categorical("output_attention", [True,False]), + d_model=d_model_temp, + n_heads=n_heads_temp, + # output_attention=trial.suggest_categorical("output_attention", [True,False]), e_layers=trial.suggest_int("e_layers", 1, 2), d_layers=trial.suggest_int("d_layers", 1, 1), d_ff=trial.suggest_int("d_ff", 32, 1024), - dropout = trial.suggest_float("dropout", 0, 0.2), - factor = trial.suggest_int("attn_factor", 1, 8), + dropout=trial.suggest_float("dropout", 0, 0.2), + factor=trial.suggest_int("attn_factor", 1, 8), output_attention=False, - #e_layers=2, - #d_layers=1, - activation=trial.suggest_categorical("activation", ["relu","gelu"]), - #embed_type=trial.suggest_categorical("embed", ["fixed","timeF","neural"]), - embed_type="timeF", #TODO remve - ) + # e_layers=2, + # d_layers=1, + activation=trial.suggest_categorical("activation", ["relu", "gelu"]), + # embed_type=trial.suggest_categorical("embed", ["fixed","timeF","neural"]), + embed_type="timeF", # TODO remve + ) if model_architecture == "autoformer": - model_params["moving_avg"] = trial.suggest_int("moving_avg",23,71,2) #uneven numbers only + model_params["moving_avg"] = trial.suggest_int( + "moving_avg", 23, 71, 2 + ) # uneven numbers only if model_architecture == "informer": - model_params["distil"] = trial.suggest_categorical("distil", [True,False]) + model_params["distil"] = trial.suggest_categorical( + "distil", [True, False] + ) if model_architecture == "dlinear": model_params = dict( - individual=trial.suggest_categorical("individual", [True,False]), - moving_avg = trial.suggest_int("moving_avg",23,71,2), #uneven numbers only + individual=trial.suggest_categorical("individual", [True, False]), + moving_avg=trial.suggest_int( + "moving_avg", 23, 71, 2 + ), # uneven numbers only ) return model_architecture, differencing, learning_rate,optimizer,embed_time, model_params def __call__(self, trial): - #def __call__(self, single_trial): Failed 
attempt at distributed training - #trial = optuna.integration.TorchDistributedTrial(single_trial) + # def __call__(self, single_trial): Failed attempt at distributed training + # trial = optuna.integration.TorchDistributedTrial(single_trial) model_architecture, differencing, learning_rate, optimizer,embed_time, model_params = self._get_model_params(trial) #TODO gradient clipping? #TODO to seed or not to seed? #pl.seed_everything(42, workers=True) - #prepare data - data_module = WaVoDataModule( str(self.filename), self.level_name_org, @@ -213,7 +246,7 @@ class Objective: embed_time=embed_time, ) - #prepare model + # prepare model model = WaVoLightningModule( model_architecture=model_architecture, feature_count=data_module.feature_count, @@ -228,47 +261,54 @@ class Objective: embed_time=data_module.hparams.embed_time, **model_params) - #find optimal batch_size - #if default_max_trials is None: + # find optimal batch_size + # if default_max_trials is None: # max_trials = 2 if model_architecture in ["transformer","autoformer"] else 5 - #else: + # else: # max_trials = default_max_trials - #logging.info("Trial %d Optimizing batch size with %d trials", trial.number, max_trials) + # logging.info("Trial %d Optimizing batch size with %d trials", trial.number, max_trials) logging.info("Params: %s", trial.params) - #logging.info("Before batch_size %.3f GB / %.3f GB", torch.cuda.mem_get_info(devices[0])[0]/1073741824, torch.cuda.mem_get_info(devices[0])[1]/1073741824) - #trainer_bs = pl.Trainer(logger=False,accelerator=accelerator,devices=devices,max_epochs=1,) - #tuner = Tuner(trainer_bs) - #tuner.scale_batch_size(model, datamodule=data_module,init_val=256,max_trials=max_trials) - #clear memory from optimizing the batch size - #tuner = None - #trainer_bs = None - #gc.collect() - #torch.cuda.empty_cache() - - logging.info("After batch_size %.3f GB / %.3f GB", torch.cuda.mem_get_info(devices[0])[0]/1073741824, torch.cuda.mem_get_info(devices[0])[1]/1073741824) - + # 
logging.info("Before batch_size %.3f GB / %.3f GB", torch.cuda.mem_get_info(devices[0])[0]/1073741824, torch.cuda.mem_get_info(devices[0])[1]/1073741824) + # trainer_bs = pl.Trainer(logger=False,accelerator=accelerator,devices=devices,max_epochs=1,) + # tuner = Tuner(trainer_bs) + # tuner.scale_batch_size(model, datamodule=data_module,init_val=256,max_trials=max_trials) + # clear memory from optimizing the batch size + # tuner = None + # trainer_bs = None + # gc.collect() + # torch.cuda.empty_cache() + + logging.info( + "After batch_size %.3f GB / %.3f GB", + torch.cuda.mem_get_info(devices[0])[0] / 1073741824, + torch.cuda.mem_get_info(devices[0])[1] / 1073741824, + ) my_callback, callbacks = self._get_callbacks(trial) logger = TensorBoardLogger(self.log_dir, default_hp_metric=False) - - trainer = pl.Trainer(default_root_dir=self.log_dir, - logger=logger, - accelerator=accelerator, - devices=devices, - callbacks=callbacks, - max_epochs=max_epochs, - #log_every_n_steps=int((len(data_module.train_set) / data_module.hparams.batch_size)/4),#TODO remove - log_every_n_steps=10, - ) + trainer = pl.Trainer( + default_root_dir=self.log_dir, + logger=logger, + accelerator=accelerator, + devices=devices, + callbacks=callbacks, + max_epochs=max_epochs, + # log_every_n_steps=int((len(data_module.train_set) / data_module.hparams.batch_size)/4),#TODO remove + log_every_n_steps=10, + ) trainer.fit(model, data_module) - logging.info("After trial %.3f GB / %.3f GB", torch.cuda.mem_get_info(devices[0])[0]/1073741824, torch.cuda.mem_get_info(devices[0])[1]/1073741824) + logging.info( + "After trial %.3f GB / %.3f GB", + torch.cuda.mem_get_info(devices[0])[0] / 1073741824, + torch.cuda.mem_get_info(devices[0])[1] / 1073741824, + ) - #save metrics to optuna + # save metrics to optuna model_path = str(Path(trainer.log_dir).resolve()) logging.info("model_path: %s", model_path) trial.set_user_attr("model_path", model_path) - for metric in ['hp/val_nse', 'hp/val_mae', 'hp/val_mae_flood']: 
+ for metric in ["hp/val_nse", "hp/val_mae", "hp/val_mae_flood"]: for i in [23, 47]: trial.set_user_attr(f'{metric}_{i+1}', my_callback.metrics[metric][i].item()) @@ -277,23 +317,49 @@ class Objective: def parse_args() -> argparse.Namespace: - """ Parse all the arguments and provides some help in the command line - """ + """Parse all the arguments and provides some help in the command line""" parser: argparse.ArgumentParser = argparse.ArgumentParser( - description='Execute Hyperparameter optimization with optuna and torchlightning.') - parser.add_argument('filename', metavar='datafile', type=Path, - help='The path to your input data.') - parser.add_argument('gauge', metavar='gaugename', - type=str, help='The name of the gauge column.') - parser.add_argument('logdir', type=Path, - help='set a directory for logs and model checkpoints.') - parser.add_argument('trials', metavar='trials',type=int, default=n_trials,help='How many trials to run.') - - parser.add_argument('--expname', metavar='experiment_name',type=str, default='nameless',help='The name of the experiment.') - parser.add_argument('--storagename', metavar='storage_name',type=str, default=None,help='The database for the experiment.') - parser.add_argument('--architecture', metavar='architecture',type=str, default=None,help='Hyperparameter to decide an architecture.') + description="Execute Hyperparameter optimization with optuna and torchlightning." + ) + parser.add_argument( + "filename", metavar="datafile", type=Path, help="The path to your input data." + ) + parser.add_argument( + "gauge", metavar="gaugename", type=str, help="The name of the gauge column." + ) + parser.add_argument( + "logdir", type=Path, help="set a directory for logs and model checkpoints." 
+ ) + parser.add_argument( + "trials", + metavar="trials", + type=int, + default=n_trials, + help="How many trials to run.", + ) + parser.add_argument( + "--expname", + metavar="experiment_name", + type=str, + default="nameless", + help="The name of the experiment.", + ) + parser.add_argument( + "--storagename", + metavar="storage_name", + type=str, + default=None, + help="The database for the experiment.", + ) + parser.add_argument( + "--architecture", + metavar="architecture", + type=str, + default=None, + help="Hyperparameter to decide an architecture.", + ) parser.add_argument( "--pruning", @@ -306,57 +372,80 @@ def parse_args() -> argparse.Namespace: return parser.parse_args() + def main(): - """Start a hyperparameter optimization with optuna and torchlightning. - """ + """Start a hyperparameter optimization with optuna and torchlightning.""" parsed_args = parse_args() if not parsed_args.logdir.exists(): parsed_args.logdir.mkdir(parents=True) - if parsed_args.pruning: - pruner = optuna.pruners.HyperbandPruner(min_resource=1, max_resource='auto', reduction_factor=3, bootstrap_count=0) + pruner = optuna.pruners.HyperbandPruner( + min_resource=1, max_resource="auto", reduction_factor=3, bootstrap_count=0 + ) else: pruner = optuna.pruners.NopPruner() - - - study_name = f"{parsed_args.filename.stem} {parsed_args.expname}" # Unique identifier of the study. - storage_name = default_storage_name if parsed_args.storagename is None else f"{storage_base}{parsed_args.storagename}.db" + study_name = f"{parsed_args.filename.stem} {parsed_args.expname}" # Unique identifier of the study. 
+ storage_name = ( + default_storage_name + if parsed_args.storagename is None + else f"{storage_base}{parsed_args.storagename}.db" + ) # Logging, add stream handler of stdout to show the messages logging.basicConfig(level=logging.INFO) - logFormatter = logging.Formatter('%(asctime)s;%(levelname)s;%(message)s',datefmt='%Y-%m-%d %H:%M:%S') - fileHandler = logging.FileHandler(parsed_args.logdir / 'optuna.log',) + logFormatter = logging.Formatter( + "%(asctime)s;%(levelname)s;%(message)s", datefmt="%Y-%m-%d %H:%M:%S" + ) + fileHandler = logging.FileHandler( + parsed_args.logdir / "optuna.log", + ) consoleHandler = logging.StreamHandler(sys.stdout) fileHandler.setFormatter(logFormatter) consoleHandler.setFormatter(logFormatter) logging.getLogger().addHandler(fileHandler) - #logging.getLogger().addHandler(consoleHandler) + # logging.getLogger().addHandler(consoleHandler) optuna.logging.get_logger("optuna").addHandler(fileHandler) optuna.logging.get_logger("optuna").addHandler(consoleHandler) - logging.info('Start of this execution======================================================================') - logging.info("Executing %s with device %s and parameters %s ",sys.argv[0],devices,sys.argv[1:]) - + logging.info( + "Start of this execution======================================================================" + ) + logging.info( + "Executing %s with device %s and parameters %s ", + sys.argv[0], + devices, + sys.argv[1:], + ) study = optuna.create_study( study_name=study_name, storage=storage_name, direction="minimize", pruner=pruner, - load_if_exists=True) + load_if_exists=True, + ) study.set_metric_names(["hp/val_loss"]) - objective = Objective(**vars(parsed_args),batch_size=1024)#TODO REMOVE batch_size + objective = Objective( + **vars(parsed_args), batch_size=1024 + ) # TODO REMOVE batch_size if parsed_args.architecture is not None: for _ in range(parsed_args.trials): study.enqueue_trial({"model_architecture": parsed_args.architecture}) - study.optimize(objective, 
n_trials=parsed_args.trials, gc_after_trial=True,timeout=None,callbacks=[lambda study, trial: torch.cuda.empty_cache()]) + study.optimize( + objective, + n_trials=parsed_args.trials, + gc_after_trial=True, + timeout=None, + callbacks=[lambda study, trial: torch.cuda.empty_cache()], + ) + if __name__ == "__main__": main() - #python main_hyper.py ../../../../data-project/KIWaVo/data/input/mergedPreetzAll.csv Preetz_pegel_cm ../../../../data-project/KIWaVo/models/lfu/preetz/ 100 --expname lstms_1 --storagename lfu + # python main_hyper.py ../../../../data-project/KIWaVo/data/input/mergedPreetzAll.csv Preetz_pegel_cm ../../../../data-project/KIWaVo/models/lfu/preetz/ 100 --expname lstms_1 --storagename lfu diff --git a/src/ensemble_models.py b/src/models/ensemble_models.py similarity index 100% rename from src/ensemble_models.py rename to src/models/ensemble_models.py diff --git a/src/models/lightning_module.py b/src/models/lightning_module.py index 0b575b282a33d656d7d4c0c593ed5d9506165b85..2d987a75f06cff996c759972bab514941d36fb53 100644 --- a/src/models/lightning_module.py +++ b/src/models/lightning_module.py @@ -150,7 +150,8 @@ class WaVoLightningModule(pl.LightningModule): x_org = self.scaler.inverse_transform(x.reshape(-1, self.feature_count).cpu()).reshape(-1,self.in_size,self.feature_count) x_base = x_org[:,-1,self.gauge_idx].round(2) #round to get rid of floating point errors, the measure value is always an integer or has at most 1 decimal x_base = torch.from_numpy(x_base).unsqueeze(1) - pred = x_base.cuda() + pred.cumsum(dim=1) + x_base = x_base.to(pred.device) + pred = x_base + pred.cumsum(dim=1) return pred diff --git a/src/example_sarah.py b/src/old/example_sarah.py similarity index 55% rename from src/example_sarah.py rename to src/old/example_sarah.py index 696ab285c6c0ed41c961ebebb47e270421c3a8b0..b27c9e5acafa56139b1977edbb9c06244c812dc8 100644 --- a/src/example_sarah.py +++ b/src/old/example_sarah.py @@ -1,6 +1,7 @@ """ Main Module to start training """ 
+ import argparse import logging import sys @@ -14,9 +15,10 @@ from lightning.pytorch.loggers import TensorBoardLogger from models.lstms import LSTM_LAST import utils.utility as ut -from callbacks import OptunaPruningCallback, WaVoCallback -#from data_tools.data_module import WaVoDataModule -#from models.lightning_module import WaVoLightningModule +from utils.callbacks import OptunaPruningCallback, WaVoCallback + +# from data_tools.data_module import WaVoDataModule +# from models.lightning_module import WaVoLightningModule from datetime import datetime import pickle @@ -28,25 +30,24 @@ from sklearn.preprocessing import StandardScaler from torch.utils.data import DataLoader import torch.nn as nn import torch.optim as optim -from data_tools.datasets import TimeSeriesDataSet # pylint: disable=import-error -import utils.utility as ut # pylint: disable=import-error +from data_tools.datasets import TimeSeriesDataSet # pylint: disable=import-error +import utils.utility as ut # pylint: disable=import-error use_cuda = torch.cuda.is_available() if use_cuda: - accelerator = 'cuda' - torch.set_float32_matmul_precision('high') + accelerator = "cuda" + torch.set_float32_matmul_precision("high") devices = [0] else: - accelerator = 'cpu' + accelerator = "cpu" devices = None # CONSTANTS -storage_base = 'sqlite:///../../../../data-project/KIWaVo/sarah/optuna/' +storage_base = "sqlite:///../../../../data-project/KIWaVo/sarah/optuna/" max_epochs = 200 n_trials = 3 - class WaVoLightningModule(pl.LightningModule): """LightningModule that defines the training and other steps, logging and the neural network. 
@@ -82,16 +83,24 @@ class WaVoLightningModule(pl.LightningModule): """ # pylint: disable-next=unused-argument - def __init__(self,feature_count, in_size, out_size,learning_rate,optimizer,scaler,target_idx,**kwargs): + def __init__( + self, + feature_count, + in_size, + out_size, + learning_rate, + optimizer, + scaler, + target_idx, + **kwargs, + ): super().__init__() + self.save_hyperparameters(ignore=["scaler"]) + # self.save_hyperparameters({"scaler": pickle.dumps(self.scaler),}) - - self.save_hyperparameters(ignore=['scaler']) - #self.save_hyperparameters({"scaler": pickle.dumps(self.scaler),}) - - if 'timestamp' in kwargs: #TODO test if this works - self.timestamp = kwargs['timestamp'] + if "timestamp" in kwargs: # TODO test if this works + self.timestamp = kwargs["timestamp"] else: self.timestamp = datetime.now().strftime("%Y%m%d-%H%M%S") self.save_hyperparameters({"timestamp": self.timestamp}) @@ -103,7 +112,6 @@ class WaVoLightningModule(pl.LightningModule): self.target_idx = target_idx self.model = self.create_model(**kwargs) - # pylint: disable-next=arguments-differ def forward(self, x): return self.model(x) @@ -118,7 +126,9 @@ class WaVoLightningModule(pl.LightningModule): # pylint: disable-next=unused-argument, arguments-differ def validation_step(self, batch, batch_idx): val_loss = self._common_step(batch) - self.log("hp/val_loss", val_loss, sync_dist=True)# syncdist needed for optuna hyperparameter optimization to work + self.log( + "hp/val_loss", val_loss, sync_dist=True + ) # syncdist needed for optuna hyperparameter optimization to work # pylint: disable-next=unused-argument, arguments-differ def test_step(self, batch, batch_idx): @@ -127,21 +137,25 @@ class WaVoLightningModule(pl.LightningModule): def predict_step(self, batch, batch_idx, dataloader_idx=0): x = batch[0] - #This is not great, but necessary if i want to use a scaler. + # This is not great, but necessary if i want to use a scaler. 
pred = self.model(x) - pred = ut.inv_standard(pred, self.scaler.mean_[self.target_idx], self.scaler.scale_[self.target_idx]) + pred = ut.inv_standard( + pred, + self.scaler.mean_[self.target_idx], + self.scaler.scale_[self.target_idx], + ) return pred - def _common_step(self,batch): + def _common_step(self, batch): x, y = batch y_hat = self.model(x) - loss = nn.functional.mse_loss(y_hat, y) #TODO different loss functions + loss = nn.functional.mse_loss(y_hat, y) # TODO different loss functions return loss def configure_optimizers(self): - if 'optimizer' not in self.hparams or self.hparams.optimizer == 'adam': + if "optimizer" not in self.hparams or self.hparams.optimizer == "adam": optimizer = optim.Adam(self.parameters(), lr=self.hparams.learning_rate) - elif self.hparams.optimizer =='adamw': + elif self.hparams.optimizer == "adamw": optimizer = optim.AdamW(self.parameters(), lr=self.hparams.learning_rate) return optimizer @@ -153,7 +167,7 @@ class WaVoLightningModule(pl.LightningModule): """ return self.model - def create_model(self,**kwargs) -> nn.Module: + def create_model(self, **kwargs) -> nn.Module: """Creates the neural network model and passes the kwargs to the model. Returns: @@ -163,7 +177,9 @@ class WaVoLightningModule(pl.LightningModule): feature_count=self.feature_count, in_size=self.in_size, out_size=self.out_size, - **kwargs) + **kwargs, + ) + class WaVoDataModule(pl.LightningDataModule): """ @@ -181,15 +197,19 @@ class WaVoDataModule(pl.LightningDataModule): scaler (StandardScaler, optional): Scaler to use for input data. Only given when the DataModule is created for prediction. Defaults to None. 
""" - def __init__(self, filename, level_name_org, - batch_size=128, - in_size=144, - out_size=48, - percentile=0.95, - train=0.7, - val=.15, - scaler=None, - **kwargs) -> None: + def __init__( + self, + filename, + level_name_org, + batch_size=128, + in_size=144, + out_size=48, + percentile=0.95, + train=0.7, + val=0.15, + scaler=None, + **kwargs, + ) -> None: super().__init__() self.filename = str(filename) self.target_column = level_name_org @@ -200,57 +220,75 @@ class WaVoDataModule(pl.LightningDataModule): self.train = train self.val = val self.scaler = scaler - #maybe tidy up the next 4 lines - self.threshold = kwargs['threshold'] if 'threshold' in kwargs else None - self.feature_count = kwargs['feature_count'] if 'feature_count' in kwargs else None + # maybe tidy up the next 4 lines + self.threshold = kwargs["threshold"] if "threshold" in kwargs else None + self.feature_count = ( + kwargs["feature_count"] if "feature_count" in kwargs else None + ) self.df = self._load_df() - self.target_idx =self.df.columns.get_loc(self.target_column) + self.target_idx = self.df.columns.get_loc(self.target_column) # if we instantiate the object from scratch we do not have these values. If we load it with the config/hparams from a model we do. 
- if all(attr is None for attr in [self.scaler,self.threshold,self.feature_count]): + if all( + attr is None for attr in [self.scaler, self.threshold, self.feature_count] + ): self._set_threshold() df = self.df.copy() # drop the first value, it's nan if differencing, but for comparing metrics we need to always drop it - val_idx = int(self.train*len(df)) - test_idx = int(val_idx + self.val*len(df)) + val_idx = int(self.train * len(df)) + test_idx = int(val_idx + self.val * len(df)) - df_train= df[:val_idx] + df_train = df[:val_idx] self.feature_count = df_train.shape[-1] self.scaler = StandardScaler() self.scaler.fit(df_train) - self.save_hyperparameters(ignore=['scaler','threshold','feature_count']) - self.save_hyperparameters({ - "scaler": pickle.dumps(self.scaler), - "threshold": self.threshold, - #These aren't really hyperparameters, but maybe useful to have them logged - "train_start":str(df.index[0]), - "val_start":str(df.index[val_idx]), - "test_start":str(df.index[test_idx]), - }) - - elif any(attr is None for attr in [self.scaler, self.threshold,self.feature_count]): - raise ValueError("If you provide a scaler, you also need to provide threshold and feature_count.") + self.save_hyperparameters(ignore=["scaler", "threshold", "feature_count"]) + self.save_hyperparameters( + { + "scaler": pickle.dumps(self.scaler), + "threshold": self.threshold, + # These aren't really hyperparameters, but maybe useful to have them logged + "train_start": str(df.index[0]), + "val_start": str(df.index[val_idx]), + "test_start": str(df.index[test_idx]), + } + ) + + elif any( + attr is None for attr in [self.scaler, self.threshold, self.feature_count] + ): + raise ValueError( + "If you provide a scaler, you also need to provide threshold and feature_count." 
+ ) else: - self.save_hyperparameters(ignore=['scaler']) + self.save_hyperparameters(ignore=["scaler"]) self.save_hyperparameters({"scaler": pickle.dumps(self.scaler)}) - self.df_scaled = pd.DataFrame(self.scaler.transform(self.df),index=self.df.index,columns=self.df.columns) + self.df_scaled = pd.DataFrame( + self.scaler.transform(self.df), index=self.df.index, columns=self.df.columns + ) def _load_df(self): df = pd.read_csv(self.filename, index_col=0, parse_dates=True) - if not isinstance(df.index,pd.core.indexes.datetimes.DatetimeIndex): - df = pd.read_csv(self.filename,index_col=0,parse_dates=True,date_format='%d.%m.%Y %H:00') + if not isinstance(df.index, pd.core.indexes.datetimes.DatetimeIndex): + df = pd.read_csv( + self.filename, + index_col=0, + parse_dates=True, + date_format="%d.%m.%Y %H:00", + ) df = ut.fill_missing_values(df) return df def _set_threshold(self): if 0 < self.percentile < 1: - self.threshold = self.df[self.target_column].quantile( - self.percentile).item() + self.threshold = ( + self.df[self.target_column].quantile(self.percentile).item() + ) else: self.threshold = self.percentile @@ -260,29 +298,54 @@ class WaVoDataModule(pl.LightningDataModule): # I don't really do setup i guess? # That train_set objects etc don't have a state, so no need to reload them or anything. 
# Additionally, i need all of them in my custom callback so i'll just load them once in the beginning and then ignore the setup method - if not hasattr(self,'train_set'): + if not hasattr(self, "train_set"): df_len = len(self.df) - val_idx = int(self.train*df_len) - test_idx = int(val_idx + self.val*df_len) - self.train_set = self.make_dataset(1,val_idx+1) - self.val_set = self.make_dataset(val_idx+1,test_idx+1) - self.test_set = self.make_dataset(test_idx+1) - + val_idx = int(self.train * df_len) + test_idx = int(val_idx + self.val * df_len) + self.train_set = self.make_dataset(1, val_idx + 1) + self.val_set = self.make_dataset(val_idx + 1, test_idx + 1) + self.test_set = self.make_dataset(test_idx + 1) def train_dataloader(self): - return DataLoader(self.train_set, batch_size=self.hparams.batch_size, shuffle=True, num_workers=8,persistent_workers=True) + return DataLoader( + self.train_set, + batch_size=self.hparams.batch_size, + shuffle=True, + num_workers=8, + persistent_workers=True, + ) def val_dataloader(self): - return DataLoader(self.val_set, batch_size=self.hparams.batch_size, shuffle=False, num_workers=8,persistent_workers=True) + return DataLoader( + self.val_set, + batch_size=self.hparams.batch_size, + shuffle=False, + num_workers=8, + persistent_workers=True, + ) def test_dataloader(self): - return DataLoader(self.test_set, batch_size=self.hparams.batch_size, shuffle=False, num_workers=8,persistent_workers=True) + return DataLoader( + self.test_set, + batch_size=self.hparams.batch_size, + shuffle=False, + num_workers=8, + persistent_workers=True, + ) def predict_dataloader(self): # return the trainset, but sorted - return DataLoader(self.train_set, batch_size=self.hparams.batch_size, shuffle=False, num_workers=8,persistent_workers=True) + return DataLoader( + self.train_set, + batch_size=self.hparams.batch_size, + shuffle=False, + num_workers=8, + persistent_workers=True, + ) - def make_dataset(self,start:int,end:int=None,exog=False) -> 
TimeSeriesDataSet: + def make_dataset( + self, start: int, end: int = None, exog=False + ) -> TimeSeriesDataSet: """Makes a TimeseriesDataset from the given range of the available data. Uses values from self.df_scaled. @@ -296,11 +359,9 @@ class WaVoDataModule(pl.LightningDataModule): x = torch.Tensor(self.df_scaled[start:end].values) y = torch.Tensor(self.df_scaled[self.target_column][start:end].values) - #y = torch.Tensor(self.df_y[start:end].values[:,0]) - - return TimeSeriesDataSet(x,y,self.in_size,self.out_size) - + # y = torch.Tensor(self.df_y[start:end].values[:,0]) + return TimeSeriesDataSet(x, y, self.in_size, self.out_size) class Objective: @@ -319,7 +380,21 @@ class Objective: val (float, optional): share of validation data (directly after train). Defaults to .15. monitor (str, optional): metric to monitor. Defaults to 'hp/val_loss'. """ - def __init__(self, filename, gauge, logdir, in_size=144, out_size=48, batch_size=2048, percentile=0.95, train=0.7, val=.15,monitor= 'hp/val_loss', **kwargs): + + def __init__( + self, + filename, + gauge, + logdir, + in_size=144, + out_size=48, + batch_size=2048, + percentile=0.95, + train=0.7, + val=0.15, + monitor="hp/val_loss", + **kwargs, + ): # Hold these implementation specific arguments as the fields of the class. 
self.filename = filename self.level_name_org = gauge @@ -333,36 +408,45 @@ class Objective: self.monitor = monitor def _get_callbacks(self, trial): - checkpoint_callback = ModelCheckpoint(save_top_k=1, monitor=self.monitor,save_weights_only=True) + checkpoint_callback = ModelCheckpoint( + save_top_k=1, monitor=self.monitor, save_weights_only=True + ) pruning_callback = OptunaPruningCallback(trial, monitor=self.monitor) - early_stop_callback = EarlyStopping(monitor=self.monitor, mode="min", patience=3) + early_stop_callback = EarlyStopping( + monitor=self.monitor, mode="min", patience=3 + ) my_callback = WaVoCallback() - return my_callback, [checkpoint_callback,pruning_callback, early_stop_callback, my_callback] - - def _get_model_params(self,trial): + return my_callback, [ + checkpoint_callback, + pruning_callback, + early_stop_callback, + my_callback, + ] + + def _get_model_params(self, trial): learning_rate = trial.suggest_float("lr", 0.00001, 0.01) optimizer = trial.suggest_categorical("optimizer", ["adam"]) model_params = dict( - hidden_size_lstm = trial.suggest_int("hidden_size_lstm", 32, 512), - num_layers_lstm = trial.suggest_int("n_layers_lstm", 1, 3), - hidden_size = trial.suggest_int("hidden_size", 32, 512), - num_layers = trial.suggest_int("n_layers", 2, 4), - dropout = trial.suggest_float("dropout", 0, 0.5), - ) - - return learning_rate,optimizer, model_params + hidden_size_lstm=trial.suggest_int("hidden_size_lstm", 32, 512), + num_layers_lstm=trial.suggest_int("n_layers_lstm", 1, 3), + hidden_size=trial.suggest_int("hidden_size", 32, 512), + num_layers=trial.suggest_int("n_layers", 2, 4), + dropout=trial.suggest_float("dropout", 0, 0.5), + ) + + return learning_rate, optimizer, model_params def __call__(self, trial): - #def __call__(self, single_trial): Failed attempt at distributed training - #trial = optuna.integration.TorchDistributedTrial(single_trial) + # def __call__(self, single_trial): Failed attempt at distributed training + # trial = 
optuna.integration.TorchDistributedTrial(single_trial) learning_rate, optimizer, model_params = self._get_model_params(trial) - #TODO gradient clipping? - #TODO to seed or not to seed? - #pl.seed_everything(42, workers=True) + # TODO gradient clipping? + # TODO to seed or not to seed? + # pl.seed_everything(42, workers=True) - #prepare data (This could maybe be done in init?) + # prepare data (This could maybe be done in init?) data_module = WaVoDataModule( str(self.filename), self.level_name_org, @@ -374,7 +458,7 @@ class Objective: val=self.val, ) - #prepare model + # prepare model model = WaVoLightningModule( feature_count=data_module.feature_count, in_size=data_module.hparams.in_size, @@ -383,57 +467,85 @@ class Objective: target_idx=data_module.target_idx, learning_rate=learning_rate, optimizer=optimizer, - **model_params) + **model_params, + ) logging.info("Params: %s", trial.params) - #logging.info("After batch_size %.3f GB / %.3f GB", torch.cuda.mem_get_info(devices[0])[0]/1073741824, torch.cuda.mem_get_info(devices[0])[1]/1073741824) - + # logging.info("After batch_size %.3f GB / %.3f GB", torch.cuda.mem_get_info(devices[0])[0]/1073741824, torch.cuda.mem_get_info(devices[0])[1]/1073741824) my_callback, callbacks = self._get_callbacks(trial) logger = TensorBoardLogger(self.log_dir, default_hp_metric=False) - - trainer = pl.Trainer(default_root_dir=self.log_dir, - logger=logger, - accelerator=accelerator, - devices=devices, - callbacks=callbacks, - max_epochs=max_epochs, - #log_every_n_steps=int((len(data_module.train_set) / data_module.hparams.batch_size)/4),#TODO remove - log_every_n_steps=10, - ) + trainer = pl.Trainer( + default_root_dir=self.log_dir, + logger=logger, + accelerator=accelerator, + devices=devices, + callbacks=callbacks, + max_epochs=max_epochs, + # log_every_n_steps=int((len(data_module.train_set) / data_module.hparams.batch_size)/4),#TODO remove + log_every_n_steps=10, + ) trainer.fit(model, data_module) - #logging.info("After trial %.3f 
GB / %.3f GB", torch.cuda.mem_get_info(devices[0])[0]/1073741824, torch.cuda.mem_get_info(devices[0])[1]/1073741824) + # logging.info("After trial %.3f GB / %.3f GB", torch.cuda.mem_get_info(devices[0])[0]/1073741824, torch.cuda.mem_get_info(devices[0])[1]/1073741824) - #save metrics to optuna + # save metrics to optuna model_path = str(Path(trainer.log_dir).resolve()) logging.info("model_path: %s", model_path) trial.set_user_attr("model_path", model_path) - for metric in ['hp/val_nse', 'hp/val_mae', 'hp/val_mae_flood']: + for metric in ["hp/val_nse", "hp/val_mae", "hp/val_mae_flood"]: for i in [23, 47]: - trial.set_user_attr(f'{metric}_{i}', my_callback.metrics[metric][i].item()) + trial.set_user_attr( + f"{metric}_{i}", my_callback.metrics[metric][i].item() + ) return my_callback.metrics[self.monitor].item() def parse_args() -> argparse.Namespace: - """ Parse all the arguments and provides some help in the command line - """ + """Parse all the arguments and provides some help in the command line""" parser: argparse.ArgumentParser = argparse.ArgumentParser( - description='Execute Hyperparameter optimization with optuna and torchlightning.') - parser.add_argument('filename', metavar='datafile', type=Path, - help='The path to your input data.') - parser.add_argument('gauge', metavar='gaugename', - type=str, help='The name of the gauge column.') - parser.add_argument('logdir', type=Path, - help='set a directory for logs and model checkpoints.') - parser.add_argument('trials', metavar='trials',type=int, default=n_trials,help='How many trials to run.') - - parser.add_argument('--expname', metavar='experiment_name',type=str, default='nameless',help='The name of the experiment.') - parser.add_argument('--storagename', metavar='storage_name',type=str, default=None,help='The database for the experiment.') - parser.add_argument('--architecture', metavar='architecture',type=str, default=None,help='Hyperparameter to decide an architecture.') + description="Execute 
Hyperparameter optimization with optuna and torchlightning." + ) + parser.add_argument( + "filename", metavar="datafile", type=Path, help="The path to your input data." + ) + parser.add_argument( + "gauge", metavar="gaugename", type=str, help="The name of the gauge column." + ) + parser.add_argument( + "logdir", type=Path, help="set a directory for logs and model checkpoints." + ) + parser.add_argument( + "trials", + metavar="trials", + type=int, + default=n_trials, + help="How many trials to run.", + ) + parser.add_argument( + "--expname", + metavar="experiment_name", + type=str, + default="nameless", + help="The name of the experiment.", + ) + parser.add_argument( + "--storagename", + metavar="storage_name", + type=str, + default=None, + help="The database for the experiment.", + ) + parser.add_argument( + "--architecture", + metavar="architecture", + type=str, + default=None, + help="Hyperparameter to decide an architecture.", + ) parser.add_argument( "--pruning", @@ -446,57 +558,79 @@ def parse_args() -> argparse.Namespace: return parser.parse_args() + def main(): - """Start a hyperparameter optimization with optuna and torchlightning. - """ + """Start a hyperparameter optimization with optuna and torchlightning.""" parsed_args = parse_args() if not parsed_args.logdir.exists(): parsed_args.logdir.mkdir(parents=True) - if parsed_args.pruning: - pruner = optuna.pruners.HyperbandPruner(min_resource=1, max_resource='auto', reduction_factor=3, bootstrap_count=0) + pruner = optuna.pruners.HyperbandPruner( + min_resource=1, max_resource="auto", reduction_factor=3, bootstrap_count=0 + ) else: pruner = optuna.pruners.NopPruner() - - - study_name = f"{parsed_args.filename.stem} {parsed_args.expname}" # Unique identifier of the study. - storage_name = f"{storage_base}default.db" if parsed_args.storagename is None else f"{storage_base}{parsed_args.storagename}.db" + study_name = f"{parsed_args.filename.stem} {parsed_args.expname}" # Unique identifier of the study. 
+ storage_name = ( + f"{storage_base}default.db" + if parsed_args.storagename is None + else f"{storage_base}{parsed_args.storagename}.db" + ) # Logging, add stream handler of stdout to show the messages logging.basicConfig(level=logging.INFO) - logFormatter = logging.Formatter('%(asctime)s;%(levelname)s;%(message)s',datefmt='%Y-%m-%d %H:%M:%S') - fileHandler = logging.FileHandler(parsed_args.logdir / 'optuna.log',) + logFormatter = logging.Formatter( + "%(asctime)s;%(levelname)s;%(message)s", datefmt="%Y-%m-%d %H:%M:%S" + ) + fileHandler = logging.FileHandler( + parsed_args.logdir / "optuna.log", + ) consoleHandler = logging.StreamHandler(sys.stdout) fileHandler.setFormatter(logFormatter) consoleHandler.setFormatter(logFormatter) logging.getLogger().addHandler(fileHandler) - #logging.getLogger().addHandler(consoleHandler) + # logging.getLogger().addHandler(consoleHandler) optuna.logging.get_logger("optuna").addHandler(fileHandler) optuna.logging.get_logger("optuna").addHandler(consoleHandler) - logging.info('Start of this execution======================================================================') - logging.info("Executing %s with device %s and parameters %s ",sys.argv[0],devices,sys.argv[1:]) - + logging.info( + "Start of this execution======================================================================" + ) + logging.info( + "Executing %s with device %s and parameters %s ", + sys.argv[0], + devices, + sys.argv[1:], + ) study = optuna.create_study( study_name=study_name, storage=storage_name, direction="minimize", pruner=pruner, - load_if_exists=True) + load_if_exists=True, + ) study.set_metric_names(["hp/val_loss"]) - objective = Objective(**vars(parsed_args),batch_size=1024, gc_after_trial=True)#TODO REMOVE batch_size + objective = Objective( + **vars(parsed_args), batch_size=1024, gc_after_trial=True + ) # TODO REMOVE batch_size if parsed_args.architecture is not None: for _ in range(parsed_args.trials): study.enqueue_trial({"model_architecture": 
parsed_args.architecture}) - study.optimize(objective, n_trials=parsed_args.trials, timeout=None,callbacks=[lambda study, trial: torch.cuda.empty_cache()]) + study.optimize( + objective, + n_trials=parsed_args.trials, + timeout=None, + callbacks=[lambda study, trial: torch.cuda.empty_cache()], + ) + if __name__ == "__main__": main() - #python main_hyper.py ../../../../data-project/KIWaVo/data/input/mergedPreetzAll.csv Preetz_pegel_cm ../../../../data-project/KIWaVo/models/lfu/preetz/ 100 --expname lstms_1 --storagename lfu + # python main_hyper.py ../../../../data-project/KIWaVo/data/input/mergedPreetzAll.csv Preetz_pegel_cm ../../../../data-project/KIWaVo/models/lfu/preetz/ 100 --expname lstms_1 --storagename lfu diff --git a/src/exp_icaart.py b/src/old/exp_icaart.py similarity index 50% rename from src/exp_icaart.py rename to src/old/exp_icaart.py index a608f93b5ea09ac3a1b31e0449177ddad0bf670a..7de5748c69618c2f99d73d12913a5e3c3d4a3828 100644 --- a/src/exp_icaart.py +++ b/src/old/exp_icaart.py @@ -1,6 +1,7 @@ """ Main Module to start training """ + import argparse import gc import logging @@ -14,33 +15,37 @@ from lightning.pytorch.callbacks import EarlyStopping, ModelCheckpoint from lightning.pytorch.loggers import TensorBoardLogger import utils.utility as ut -from callbacks import OptunaPruningCallback, WaVoCallback +from utils.callbacks import OptunaPruningCallback, WaVoCallback from data_tools.data_module import WaVoDataModule from models.lightning_module import WaVoLightningModule use_cuda = torch.cuda.is_available() if use_cuda: - accelerator = 'cuda' - #accelerator = 'gpu' - torch.set_float32_matmul_precision('high') + accelerator = "cuda" + # accelerator = 'gpu' + torch.set_float32_matmul_precision("high") else: - accelerator = 'cpu' + accelerator = "cpu" # CONSTANTS -#storage_base = 'sqlite:///../../models_torch/optuna/' -storage_base = 'sqlite:///../../../../data-project/KIWaVo/models/optuna/' +# storage_base = 'sqlite:///../../models_torch/optuna/' 
+storage_base = "sqlite:///../../../../data-project/KIWaVo/models/optuna/" if ut.debugger_is_active(): - #default_storage_name = 'sqlite:///../models_torch/optuna/debug_01.db' - default_storage_name = 'sqlite:///../../../data-project/KIWaVo/models/optuna/debug_icaart.db' + # default_storage_name = 'sqlite:///../models_torch/optuna/debug_01.db' + default_storage_name = ( + "sqlite:///../../../data-project/KIWaVo/models/optuna/debug_icaart.db" + ) n_trials = 2 max_epochs = 2 default_max_trials = 1 else: - #default_storage_name = 'sqlite:///../../models_torch/optuna/optimization_01.db' - default_storage_name = 'sqlite:///../../../../data-project/KIWaVo/models/optuna/icaart_01.db' + # default_storage_name = 'sqlite:///../../models_torch/optuna/optimization_01.db' + default_storage_name = ( + "sqlite:///../../../../data-project/KIWaVo/models/optuna/icaart_01.db" + ) n_trials = 100 max_epochs = 2000 default_max_trials = None @@ -50,14 +55,14 @@ if use_cuda: max_free = 0 best_device = None for j in range(torch.cuda.device_count()): - free , _ = torch.cuda.mem_get_info(j) + free, _ = torch.cuda.mem_get_info(j) if free > max_free: max_free = free best_device = j devices = [best_device] else: - devices = 'auto' + devices = "auto" class Objective: @@ -76,7 +81,21 @@ class Objective: val (float, optional): share of validation data (directly after train). Defaults to .15. monitor (str, optional): metric to monitor. Defaults to 'hp/val_loss'. """ - def __init__(self, filename, gauge, logdir, in_size=144, out_size=48, batch_size=2048, percentile=0.95, train=0.7, val=.15,monitor= 'hp/val_loss', **kwargs): + + def __init__( + self, + filename, + gauge, + logdir, + in_size=144, + out_size=48, + batch_size=2048, + percentile=0.95, + train=0.7, + val=0.15, + monitor="hp/val_loss", + **kwargs, + ): # Hold these implementation specific arguments as the fields of the class. 
self.filename = filename self.level_name_org = gauge @@ -90,31 +109,44 @@ class Objective: self.monitor = monitor def _get_callbacks(self, trial): - checkpoint_callback = ModelCheckpoint(save_top_k=1, monitor=self.monitor,save_weights_only=True) + checkpoint_callback = ModelCheckpoint( + save_top_k=1, monitor=self.monitor, save_weights_only=True + ) pruning_callback = OptunaPruningCallback(trial, monitor=self.monitor) - early_stop_callback = EarlyStopping(monitor=self.monitor, mode="min", patience=3) + early_stop_callback = EarlyStopping( + monitor=self.monitor, mode="min", patience=3 + ) my_callback = WaVoCallback() - return my_callback, [checkpoint_callback,pruning_callback, early_stop_callback, my_callback] + return my_callback, [ + checkpoint_callback, + pruning_callback, + early_stop_callback, + my_callback, + ] - def _get_model_params(self,trial): + def _get_model_params(self, trial): differencing = 0 learning_rate = 0.001 - model_architecture = 'last_lstm' + model_architecture = "last_lstm" model_params = dict( - hidden_size_lstm = trial.suggest_categorical("hidden_size_lstm", [64, 128, 256]), - num_layers_lstm = trial.suggest_categorical("n_layers_lstm", [1,2,3]), - hidden_size = trial.suggest_categorical("hidden_size", [128, 256]), - num_layers = trial.suggest_categorical("n_layers", [2,3]), - dropout = 0.25 - ) + hidden_size_lstm=trial.suggest_categorical( + "hidden_size_lstm", [64, 128, 256] + ), + num_layers_lstm=trial.suggest_categorical("n_layers_lstm", [1, 2, 3]), + hidden_size=trial.suggest_categorical("hidden_size", [128, 256]), + num_layers=trial.suggest_categorical("n_layers", [2, 3]), + dropout=0.25, + ) return model_architecture, differencing, learning_rate, model_params def __call__(self, trial): - model_architecture, differencing, learning_rate, model_params = self._get_model_params(trial) - #prepare data + model_architecture, differencing, learning_rate, model_params = ( + self._get_model_params(trial) + ) + # prepare data data_module = 
WaVoDataModule( str(self.filename), self.level_name_org, @@ -126,10 +158,14 @@ class Objective: train=self.train, val=self.val, model_architecture=model_architecture, - time_enc=model_params['embed_type'] if model_architecture in ["transformer","autoformer"] else 'fixed', + time_enc=( + model_params["embed_type"] + if model_architecture in ["transformer", "autoformer"] + else "fixed" + ), ) - #prepare model + # prepare model model = WaVoLightningModule( model_architecture=model_architecture, feature_count=data_module.feature_count, @@ -138,49 +174,81 @@ class Objective: scaler=data_module.scaler, target_idx=data_module.target_idx, learning_rate=learning_rate, - **model_params) + **model_params, + ) logging.info("Params: %s", trial.params) - my_callback, callbacks = self._get_callbacks(trial) logger = TensorBoardLogger(self.log_dir, default_hp_metric=False) - - trainer = pl.Trainer(default_root_dir=self.log_dir, - logger=logger, - accelerator=accelerator, - devices=devices, - callbacks=callbacks, - max_epochs=max_epochs, - log_every_n_steps=3, - ) + trainer = pl.Trainer( + default_root_dir=self.log_dir, + logger=logger, + accelerator=accelerator, + devices=devices, + callbacks=callbacks, + max_epochs=max_epochs, + log_every_n_steps=3, + ) trainer.fit(model, data_module) - #save metrics to optuna + # save metrics to optuna model_path = str(Path(trainer.log_dir).resolve()) logging.info("model_path: %s", model_path) trial.set_user_attr("model_path", model_path) - for metric in ['hp/val_nse', 'hp/val_mae', 'hp/val_mae_flood']: + for metric in ["hp/val_nse", "hp/val_mae", "hp/val_mae_flood"]: for i in [23, 47]: - trial.set_user_attr(f'{metric}_{i}', my_callback.metrics[metric][i].item()) + trial.set_user_attr( + f"{metric}_{i}", my_callback.metrics[metric][i].item() + ) return my_callback.metrics[self.monitor].item() def parse_args() -> argparse.Namespace: - """ Parse all the arguments and provides some help in the command line - """ - - parser: argparse.ArgumentParser = 
argparse.ArgumentParser(description='Execute experiments for exp_icaart.') - parser.add_argument('filename', metavar='datafile', type=Path,help='The path to your input data.') - parser.add_argument('gauge', metavar='gaugename',type=str, help='The name of the gauge column.') - parser.add_argument('logdir', type=Path,help='set a directory for logs and model checkpoints.') - parser.add_argument('trials', metavar='trials',type=int, default=n_trials,help='How many trials to run.') + """Parse all the arguments and provides some help in the command line""" - parser.add_argument('--expname', metavar='experiment_name',type=str, default='nameless',help='The name of the experiment.') - parser.add_argument('--storagename', metavar='storage_name',type=str, default=None,help='The database for the experiment.') - parser.add_argument('--architecture', metavar='architecture',type=str, default=None,help='Hyperparameter to decide an architecture.') + parser: argparse.ArgumentParser = argparse.ArgumentParser( + description="Execute experiments for exp_icaart." + ) + parser.add_argument( + "filename", metavar="datafile", type=Path, help="The path to your input data." + ) + parser.add_argument( + "gauge", metavar="gaugename", type=str, help="The name of the gauge column." + ) + parser.add_argument( + "logdir", type=Path, help="set a directory for logs and model checkpoints." 
+ ) + parser.add_argument( + "trials", + metavar="trials", + type=int, + default=n_trials, + help="How many trials to run.", + ) + parser.add_argument( + "--expname", + metavar="experiment_name", + type=str, + default="nameless", + help="The name of the experiment.", + ) + parser.add_argument( + "--storagename", + metavar="storage_name", + type=str, + default=None, + help="The database for the experiment.", + ) + parser.add_argument( + "--architecture", + metavar="architecture", + type=str, + default=None, + help="Hyperparameter to decide an architecture.", + ) parser.add_argument( "--pruning", @@ -192,49 +260,62 @@ def parse_args() -> argparse.Namespace: return parser.parse_args() + def main(): - """Start a hyperparameter optimization with optuna and torchlightning. - """ + """Start a hyperparameter optimization with optuna and torchlightning.""" parsed_args = parse_args() if not parsed_args.logdir.exists(): parsed_args.logdir.mkdir(parents=True) - if parsed_args.pruning: pruner = optuna.pruners.HyperbandPruner( - min_resource=1, max_resource='auto', reduction_factor=3, bootstrap_count=0) + min_resource=1, max_resource="auto", reduction_factor=3, bootstrap_count=0 + ) else: pruner = optuna.pruners.NopPruner() - - - study_name = f"{parsed_args.filename.stem} {parsed_args.expname}" # Unique identifier of the study. - storage_name = default_storage_name if parsed_args.storagename is None else f"{storage_base}{parsed_args.storagename}.db" + study_name = f"{parsed_args.filename.stem} {parsed_args.expname}" # Unique identifier of the study. 
+ storage_name = ( + default_storage_name + if parsed_args.storagename is None + else f"{storage_base}{parsed_args.storagename}.db" + ) # Logging, add stream handler of stdout to show the messages logging.basicConfig(level=logging.INFO) - logFormatter = logging.Formatter('%(asctime)s;%(levelname)s;%(message)s',datefmt='%Y-%m-%d %H:%M:%S') - fileHandler = logging.FileHandler(parsed_args.logdir / 'optuna.log',) + logFormatter = logging.Formatter( + "%(asctime)s;%(levelname)s;%(message)s", datefmt="%Y-%m-%d %H:%M:%S" + ) + fileHandler = logging.FileHandler( + parsed_args.logdir / "optuna.log", + ) consoleHandler = logging.StreamHandler(sys.stdout) fileHandler.setFormatter(logFormatter) consoleHandler.setFormatter(logFormatter) logging.getLogger().addHandler(fileHandler) - #logging.getLogger().addHandler(consoleHandler) + # logging.getLogger().addHandler(consoleHandler) optuna.logging.get_logger("optuna").addHandler(fileHandler) optuna.logging.get_logger("optuna").addHandler(consoleHandler) - logging.info('Start of this execution======================================================================') - logging.info("Executing %s with device %s and parameters %s ",sys.argv[0],devices,sys.argv[1:]) + logging.info( + "Start of this execution======================================================================" + ) + logging.info( + "Executing %s with device %s and parameters %s ", + sys.argv[0], + devices, + sys.argv[1:], + ) search_space = { - "hidden_size_lstm":[64, 128, 256], - "n_layers_lstm": [1,2], - "hidden_size": [128, 256], - "n_layers": [2,3], + "hidden_size_lstm": [64, 128, 256], + "n_layers_lstm": [1, 2], + "hidden_size": [128, 256], + "n_layers": [2, 3], } trials = parsed_args.trials - space_sizes = [len(v) for k,v in search_space.items()] + space_sizes = [len(v) for k, v in search_space.items()] for el in space_sizes: trials *= el @@ -244,16 +325,24 @@ def main(): storage=storage_name, direction="minimize", pruner=pruner, - load_if_exists=True) + 
load_if_exists=True, + ) study.set_metric_names(["hp/val_loss"]) - objective = Objective(**vars(parsed_args),batch_size=4096, gc_after_trial=True)#TODO REMOVE batch_size + objective = Objective( + **vars(parsed_args), batch_size=4096, gc_after_trial=True + ) # TODO REMOVE batch_size - #if parsed_args.architecture is not None: + # if parsed_args.architecture is not None: # for _ in range(parsed_args.trials): # study.enqueue_trial({"model_architecture": parsed_args.architecture}) - study.optimize(objective, timeout=None,callbacks=[lambda study, trial: torch.cuda.empty_cache()]) + study.optimize( + objective, + timeout=None, + callbacks=[lambda study, trial: torch.cuda.empty_cache()], + ) + if __name__ == "__main__": main() diff --git a/src/old/icaart_ensemble_hyper.py b/src/old/icaart_ensemble_hyper.py new file mode 100644 index 0000000000000000000000000000000000000000..ad140aa5a7f9523b369aef1db7758d79dc0e6874 --- /dev/null +++ b/src/old/icaart_ensemble_hyper.py @@ -0,0 +1,309 @@ +import argparse +import logging +import pickle +import sys +from pathlib import Path + +import lightning.pytorch as pl +import optuna +import torch +import yaml +from lightning.pytorch.callbacks import EarlyStopping, ModelCheckpoint +from lightning.pytorch.loggers import TensorBoardLogger + +import utils.helpers as hp +import utils.utility as ut +from utils.callbacks import OptunaPruningCallback, WaVoCallback +from data_tools.data_module import WaVoDataModule +from models.ensemble_models import WaVoLightningEnsemble, WaVoLightningAttentionEnsemble + +use_cuda = torch.cuda.is_available() +if use_cuda: + accelerator = "cuda" + torch.set_float32_matmul_precision("high") + + max_free = 0 + best_device = None + for j in range(torch.cuda.device_count()): + free, _ = torch.cuda.mem_get_info(j) + if free > max_free: + max_free = free + best_device = j + devices = [best_device] + +else: + accelerator = "cpu" + devices = "auto" + +storage_base = 
"sqlite:///../../../../data-project/KIWaVo/models/optuna/" + +if ut.debugger_is_active(): + max_epochs = 2 + default_storage_name = "sqlite:///../../../data-project/KIWaVo/models/optuna/icaart_ensemble_debug_01.db" + +else: + max_epochs = 2000 + default_storage_name = ( + "sqlite:///../../../../data-project/KIWaVo/models/optuna/icaart_ensemble_01.db" + ) + + +class Objective: + """ + This class defines the objective function for hyperparameter tuning using Optuna library. + + Args: + filename (str|Path): Path to .csv file, first column should be a timeindex + model_dir (str|Path): Path to the directory containing the base models + log_dir (str|Path): Path to the logging directory + select_strat (str): How to select the base models (random or first n) + model_count (int): How many base models to use + monitor (str, optional): metric to monitor. Defaults to 'hp/val_loss'. + """ + + def __init__( + self, + filename, + modeldir, + logdir, + select_strat, + model_count, + monitor="hp/val_loss", + **kwargs, + ): + # Hold these implementation specific arguments as the fields of the class. 
+ self.filename = filename + self.model_dir = modeldir + self.log_dir = logdir + self.select_strat = select_strat + self.model_count = model_count + self.monitor = monitor + + def _get_callbacks(self, trial): + checkpoint_callback = ModelCheckpoint( + save_top_k=1, monitor=self.monitor, save_weights_only=True + ) + pruning_callback = OptunaPruningCallback(trial, monitor=self.monitor) + early_stop_callback = EarlyStopping( + monitor=self.monitor, mode="min", patience=3 + ) + my_callback = WaVoCallback(ensemble=True) + + return my_callback, [ + checkpoint_callback, + pruning_callback, + early_stop_callback, + my_callback, + ] + + def _get_model_params(self, trial): + model_params = dict( + hidden_size=trial.suggest_int("hidden_size", 32, 512), + num_layers=trial.suggest_int("n_layers", 2, 4), + dropout=0.25, + learning_rate=trial.suggest_float("lr", 0.00001, 0.01), + norm_func=trial.suggest_categorical("norm_func", ["softmax", "minmax"]), + ) + + return model_params + + def __call__(self, trial): + + model_params = self._get_model_params(trial) + # log_dir = '../../../data-project/KIWaVo/models/ensemble_debug/' + # model_dir = Path('../../../data-project/KIWaVo/models/icaarts_hollingstedt/lightning_logs/') + model_list = [] + model_path_list = [] + yaml_data = None + + if self.select_strat == "random": + all_models = [x.name.split("_")[1] for x in self.model_dir.iterdir()] + + elif self.select_strat == "first": + model_choice = list(range(self.model_count)) + + # for s in [0,1,2]: + for s in model_choice: + temp_dir = self.model_dir / f"version_{s}/" + + model_list.append( + hp.load_model_cuda(temp_dir, use_cuda=use_cuda, devices=devices) + ) + model_path_list.append(str(temp_dir.resolve())) + + if yaml_data is None: + yaml_data = hp.load_settings_model(temp_dir) + # with open(temp_dir / 'hparams.yaml', 'r') as file: + # yaml_data = yaml.load(file, Loader=yaml.FullLoader) + # yaml_data['scaler'] = pickle.loads(yaml_data['scaler']) + + config = { + "scaler": 
yaml_data[ + "scaler" + ], # TODO test how this works without giving scaler etc. outside of jupyterlab + #'filename' : yaml_data['filename'], + "filename": str(self.filename), + "level_name_org": yaml_data["level_name_org"], + "out_size": yaml_data["out_size"], + "threshold": yaml_data["threshold"], + "feature_count": yaml_data["feature_count"], + "differencing": yaml_data["differencing"], + "model_architecture": "ensemble", + } + + print("ACHTUNG GGF. FALSCHES MODELL") + # ensemble_model = WaVoLightningEnsemble(model_list,model_path_list,**model_params)#TODO 'ÄNDERN! + ensemble_model = WaVoLightningAttentionEnsemble( + model_list, model_path_list, **model_params + ) + + config["in_size"] = ensemble_model.max_in_size + + data_module = WaVoDataModule(**config) + + logging.info("Params: %s", trial.params) + + my_callback, callbacks = self._get_callbacks(trial) + logger = TensorBoardLogger(self.log_dir, default_hp_metric=False) + + trainer = pl.Trainer( + default_root_dir=self.log_dir, + gradient_clip_val=0.5, + logger=logger, + accelerator=accelerator, + devices=devices, + callbacks=callbacks, + max_epochs=max_epochs, + log_every_n_steps=10, + ) + trainer.fit(ensemble_model, data_module) + + # save metrics to optuna + model_path = str(Path(trainer.log_dir).resolve()) + logging.info("model_path: %s", model_path) + trial.set_user_attr("model_path", model_path) + for metric in ["hp/val_nse", "hp/val_mae", "hp/val_mae_flood"]: + for i in [23, 47]: + trial.set_user_attr( + f"{metric}_{i}", my_callback.metrics[metric][i].item() + ) + + return my_callback.metrics[self.monitor].item() + + +def parse_args() -> argparse.Namespace: + """Parse all the arguments and provides some help in the command line""" + + parser: argparse.ArgumentParser = argparse.ArgumentParser( + description="Execute experiments for exp_icaart." + ) + parser.add_argument( + "filename", metavar="datafile", type=Path, help="The path to your input data." 
def parse_args() -> argparse.Namespace:
    """Parse all the arguments and provides some help in the command line"""

    parser: argparse.ArgumentParser = argparse.ArgumentParser(
        description="Execute experiments for exp_icaart."
    )
    parser.add_argument(
        "filename", metavar="datafile", type=Path, help="The path to your input data."
    )
    parser.add_argument(
        "modeldir", metavar="modeldir", type=Path, help="The path to your base models."
    )
    parser.add_argument(
        "logdir", type=Path, help="set a directory for logs and model checkpoints."
    )
    # NOTE(review): 'default' has no effect on required positional arguments.
    parser.add_argument(
        "trials",
        metavar="trials",
        type=int,
        default=100,
        help="How many trials to run.",
    )
    parser.add_argument("select_strat", choices=["random", "first"])
    parser.add_argument(
        "model_count",
        metavar="mc",
        type=int,
        default=5,
        help="How many base models to use.",
    )

    parser.add_argument(
        "--expname",
        metavar="experiment_name",
        type=str,
        default="nameless",
        help="The name of the experiment.",
    )
    parser.add_argument(
        "--storagename",
        metavar="storage_name",
        type=str,
        default=None,
        help="The database for the experiment.",
    )

    return parser.parse_args()


def main():
    """Set up logging and storage, then run the Optuna study."""
    parsed_args = parse_args()
    parsed_args.logdir.mkdir(parents=True, exist_ok=True)

    # The dead `if False:` Hyperband branch was removed; to re-enable pruning use
    # optuna.pruners.HyperbandPruner(min_resource=1, max_resource="auto",
    #                                reduction_factor=3, bootstrap_count=0)
    pruner = optuna.pruners.NopPruner()

    study_name = f"{parsed_args.filename.stem} {parsed_args.expname}"  # Unique identifier of the study.
    storage_name = (
        default_storage_name
        if parsed_args.storagename is None
        else f"{storage_base}{parsed_args.storagename}.db"
    )

    # Logging: file handler for the root and optuna loggers, stdout for optuna.
    logging.basicConfig(level=logging.INFO)
    logFormatter = logging.Formatter(
        "%(asctime)s;%(levelname)s;%(message)s", datefmt="%Y-%m-%d %H:%M:%S"
    )
    fileHandler = logging.FileHandler(
        parsed_args.logdir / "ensemble_hyper.log",
    )
    consoleHandler = logging.StreamHandler(sys.stdout)
    fileHandler.setFormatter(logFormatter)
    consoleHandler.setFormatter(logFormatter)
    logging.getLogger().addHandler(fileHandler)
    optuna.logging.get_logger("optuna").addHandler(fileHandler)
    optuna.logging.get_logger("optuna").addHandler(consoleHandler)

    logging.info(
        "Start of this execution======================================================================"
    )
    logging.info(
        "Executing %s with device %s and parameters %s ",
        sys.argv[0],
        devices,
        sys.argv[1:],
    )

    study = optuna.create_study(
        study_name=study_name,
        storage=storage_name,
        direction="minimize",
        pruner=pruner,
        load_if_exists=True,
    )
    study.set_metric_names(["hp/val_loss"])

    # Bugfix: gc_after_trial is an argument of Study.optimize, not of
    # Objective.__init__ (where it was silently swallowed by **kwargs).
    objective = Objective(**vars(parsed_args))
    study.optimize(
        objective,
        n_trials=parsed_args.trials,
        timeout=None,
        gc_after_trial=True,
        callbacks=[lambda study, trial: torch.cuda.empty_cache()],
    )


if __name__ == "__main__":
    main()
class Objective:
    """Optuna objective that trains a WaVoLightningEnsemble over a randomly
    sampled subset of pre-trained base models.

    Args:
        filename (str|Path): Path to .csv file, first column should be a timeindex
        modeldir (str|Path): Path to the directory containing the base models
        logdir (str|Path): Path to the logging directory
        model_count (int): How many base models to use
        monitor (str, optional): metric to monitor. Defaults to 'hp/val_loss'.
    """

    def __init__(
        self, filename, modeldir, logdir, model_count, monitor="hp/val_loss", **kwargs
    ):
        self.filename = filename
        self.model_dir = modeldir
        self.log_dir = logdir
        self.model_count = model_count
        self.monitor = monitor
        # Every entry of model_dir is expected to be named 'version_<n>'.
        self.all_models = [
            int(entry.name.split("_")[1]) for entry in self.model_dir.iterdir()
        ]

    def _get_callbacks(self, trial):
        """Build the Trainer callbacks; returns (metrics callback, full list)."""
        metrics_cb = WaVoCallback(ensemble=True)
        trainer_callbacks = [
            ModelCheckpoint(save_top_k=1, monitor=self.monitor, save_weights_only=True),
            OptunaPruningCallback(trial, monitor=self.monitor),
            EarlyStopping(monitor=self.monitor, mode="min", patience=3),
            metrics_cb,
        ]
        return metrics_cb, trainer_callbacks

    def _get_model_params(self, trial):
        """Suggest the (fixed) hyperparameters of the ensemble head."""
        # 'place_holder' only exists so optuna keeps producing trials instead of
        # stopping once every real parameter combination has been tested.
        place_holder = trial.suggest_int("place_holder", 0, 10000)

        return dict(
            hidden_size=trial.suggest_int("hidden_size", 512, 512),
            num_layers=trial.suggest_int("n_layers", 2, 2),
            dropout=0.25,
            learning_rate=trial.suggest_float("lr", 0.002, 0.002),
            norm_func=trial.suggest_categorical("norm_func", ["minmax"]),
        )

    def __call__(self, trial):
        model_params = self._get_model_params(trial)

        chosen_versions = random.sample(self.all_models, self.model_count)
        version_dirs = [self.model_dir / f"version_{v}/" for v in chosen_versions]

        model_list = [
            hp.load_model_cuda(d, use_cuda=use_cuda, devices=devices)
            for d in version_dirs
        ]
        model_path_list = [str(d.resolve()) for d in version_dirs]
        # All base models share one data setup; read it once from the first.
        yaml_data = hp.load_settings_model(version_dirs[0])

        config = {
            "scaler": yaml_data[
                "scaler"
            ],  # TODO test how this works without giving scaler etc. outside of jupyterlab
            "filename": str(self.filename),
            "level_name_org": yaml_data["level_name_org"],
            "out_size": yaml_data["out_size"],
            "threshold": yaml_data["threshold"],
            "feature_count": yaml_data["feature_count"],
            "differencing": yaml_data["differencing"],
            "model_architecture": "ensemble",
        }

        ensemble_model = WaVoLightningEnsemble(
            model_list, model_path_list, **model_params
        )
        config["in_size"] = ensemble_model.max_in_size

        data_module = WaVoDataModule(**config)

        logging.info("Params: %s", trial.params)

        metrics_cb, trainer_callbacks = self._get_callbacks(trial)
        tb_logger = TensorBoardLogger(self.log_dir, default_hp_metric=False)

        trainer = pl.Trainer(
            default_root_dir=self.log_dir,
            gradient_clip_val=0.5,
            logger=tb_logger,
            accelerator=accelerator,
            devices=devices,
            callbacks=trainer_callbacks,
            max_epochs=max_epochs,
            log_every_n_steps=10,
        )
        trainer.fit(ensemble_model, data_module)

        # Persist per-horizon metrics on the trial for later analysis.
        model_path = str(Path(trainer.log_dir).resolve())
        logging.info("model_path: %s", model_path)
        trial.set_user_attr("model_path", model_path)
        for metric in ("hp/val_nse", "hp/val_mae", "hp/val_mae_flood"):
            for horizon in (23, 47):
                trial.set_user_attr(
                    f"{metric}_{horizon}", metrics_cb.metrics[metric][horizon].item()
                )

        return metrics_cb.metrics[self.monitor].item()
def parse_args() -> argparse.Namespace:
    """Parse all the arguments and provides some help in the command line"""

    parser: argparse.ArgumentParser = argparse.ArgumentParser(
        description="Execute experiments for exp_icaart."
    )
    parser.add_argument(
        "filename", metavar="datafile", type=Path, help="The path to your input data."
    )
    parser.add_argument(
        "modeldir", metavar="modeldir", type=Path, help="The path to your base models."
    )
    parser.add_argument(
        "logdir", type=Path, help="set a directory for logs and model checkpoints."
    )
    # NOTE(review): 'default' has no effect on required positional arguments.
    parser.add_argument(
        "trials",
        metavar="trials",
        type=int,
        default=100,
        help="How many trials to run.",
    )
    parser.add_argument(
        "model_count",
        metavar="mc",
        type=int,
        default=5,
        help="How many base models to use.",
    )

    parser.add_argument(
        "--expname",
        metavar="experiment_name",
        type=str,
        default="nameless",
        help="The name of the experiment.",
    )
    parser.add_argument(
        "--storagename",
        metavar="storage_name",
        type=str,
        default=None,
        help="The database for the experiment.",
    )

    return parser.parse_args()


def main():
    """Set up logging and storage, then run the Optuna study."""
    parsed_args = parse_args()
    parsed_args.logdir.mkdir(parents=True, exist_ok=True)

    # The dead `if False:` Hyperband branch was removed; to re-enable pruning use
    # optuna.pruners.HyperbandPruner(min_resource=1, max_resource="auto",
    #                                reduction_factor=3, bootstrap_count=0)
    pruner = optuna.pruners.NopPruner()

    study_name = f"{parsed_args.filename.stem} {parsed_args.expname}"  # Unique identifier of the study.
    storage_name = (
        default_storage_name
        if parsed_args.storagename is None
        else f"{storage_base}{parsed_args.storagename}.db"
    )

    # Logging: file handler for the root and optuna loggers, stdout for optuna.
    logging.basicConfig(level=logging.INFO)
    logFormatter = logging.Formatter(
        "%(asctime)s;%(levelname)s;%(message)s", datefmt="%Y-%m-%d %H:%M:%S"
    )
    fileHandler = logging.FileHandler(
        parsed_args.logdir / "ensemble_hyper.log",
    )
    consoleHandler = logging.StreamHandler(sys.stdout)
    fileHandler.setFormatter(logFormatter)
    consoleHandler.setFormatter(logFormatter)
    logging.getLogger().addHandler(fileHandler)
    optuna.logging.get_logger("optuna").addHandler(fileHandler)
    optuna.logging.get_logger("optuna").addHandler(consoleHandler)

    logging.info(
        "Start of this execution======================================================================"
    )
    logging.info(
        "Executing %s with device %s and parameters %s ",
        sys.argv[0],
        devices,
        sys.argv[1:],
    )

    study = optuna.create_study(
        study_name=study_name,
        storage=storage_name,
        direction="minimize",
        pruner=pruner,
        load_if_exists=True,
    )
    study.set_metric_names(["hp/val_loss"])

    # Bugfix: gc_after_trial is an argument of Study.optimize, not of
    # Objective.__init__ (where it was silently swallowed by **kwargs).
    objective = Objective(**vars(parsed_args))
    study.optimize(
        objective,
        n_trials=parsed_args.trials,
        timeout=None,
        gc_after_trial=True,
        callbacks=[lambda study, trial: torch.cuda.empty_cache()],
    )


if __name__ == "__main__":
    main()
from lightning.pytorch.loggers import TensorBoardLogger

import utils.helpers as hp
import utils.utility as ut
from utils.callbacks import OptunaPruningCallback, WaVoCallback
from data_tools.data_module import WaVoDataModule
from models.ensemble_models import WaVoLightningEnsemble

# Device selection: with CUDA available, pick the single GPU with the most
# free memory right now; otherwise fall back to CPU with automatic devices.
use_cuda = torch.cuda.is_available()
if use_cuda:
    accelerator = "cuda"
    torch.set_float32_matmul_precision("high")

    max_free = 0
    best_device = None
    for j in range(torch.cuda.device_count()):
        free, _ = torch.cuda.mem_get_info(j)
        if free > max_free:
            max_free = free
            best_device = j
    devices = [best_device]

else:
    accelerator = "cpu"
    devices = "auto"

# Base path for optuna sqlite storages; --storagename gets appended to this.
storage_base = "sqlite:///../../../../data-project/KIWaVo/models/optuna/"

# Under a debugger: tiny runs against a separate debug database.
if ut.debugger_is_active():
    max_epochs = 2
    default_storage_name = "sqlite:///../../../data-project/KIWaVo/models/optuna/icaart_ensemble_debug_01.db"

else:
    max_epochs = 100
    default_storage_name = (
        "sqlite:///../../../../data-project/KIWaVo/models/optuna/icaart_ensemble_01.db"
    )


class Objective:
    """
    This class defines the objective function for hyperparameter tuning using Optuna library.

    Each trial samples an ensemble size, draws that many base models at random
    from model_dir, and trains a WaVoLightningEnsemble on top of them.

    Args:
        filename (str|Path): Path to .csv file, first column should be a timeindex
        model_dir (str|Path): Path to the directory containing the base models
        log_dir (str|Path): Path to the logging directory
        monitor (str, optional): metric to monitor. Defaults to 'hp/val_loss'.
    """

    def __init__(self, filename, modeldir, logdir, monitor="hp/val_loss", **kwargs):
        # Hold these implementation specific arguments as the fields of the class.
        self.filename = filename
        self.model_dir = modeldir
        self.log_dir = logdir
        self.monitor = monitor
        # Available base-model versions, parsed from 'version_<n>' dir names.
        self.all_models = [int(x.name.split("_")[1]) for x in self.model_dir.iterdir()]

    def _get_callbacks(self, trial):
        # Returns (metrics callback, list of all callbacks) for the Trainer.
        checkpoint_callback = ModelCheckpoint(
            save_top_k=1, monitor=self.monitor, save_weights_only=True
        )
        pruning_callback = OptunaPruningCallback(trial, monitor=self.monitor)
        early_stop_callback = EarlyStopping(
            monitor=self.monitor, mode="min", patience=3
        )
        my_callback = WaVoCallback(ensemble=True)

        return my_callback, [
            checkpoint_callback,
            pruning_callback,
            early_stop_callback,
            my_callback,
        ]

    def _get_model_params(self, trial):
        # This is just so that optuna will actually execute the number of trials we want, it stops when it testes all parameter combinations
        ensemble_size = trial.suggest_categorical("ensemble_size", [5, 10, 15, 20])
        # temperature = trial.suggest_categorical("temperature",[1/10,1,10])

        # All head hyperparameters are fixed; only ensemble_size really varies.
        model_params = dict(
            hidden_size=trial.suggest_int("hidden_size", 512, 512),
            num_layers=trial.suggest_int("n_layers", 2, 2),
            dropout=0.25,
            learning_rate=trial.suggest_float("lr", 0.002, 0.002),
            norm_func=trial.suggest_categorical("norm_func", ["softmax"]),
        )

        # NOTE: returns a tuple, unlike the sibling scripts' _get_model_params.
        return model_params, ensemble_size

    def __call__(self, trial):

        model_params, ensemble_size = self._get_model_params(trial)
        # log_dir = '../../../data-project/KIWaVo/models/ensemble_debug/'
        # model_dir = Path('../../../data-project/KIWaVo/models/icaarts_hollingstedt/lightning_logs/')
        model_list = []
        model_path_list = []
        yaml_data = None

        # Draw `ensemble_size` distinct base-model versions at random.
        model_choice = random.sample(self.all_models, ensemble_size)

        # for s in [0,1,2]:
        for s in model_choice:
            temp_dir = self.model_dir / f"version_{s}/"

            model_list.append(
                hp.load_model_cuda(temp_dir, use_cuda=use_cuda, devices=devices)
            )
            model_path_list.append(str(temp_dir.resolve()))

            # All base models share one data setup; read it from the first one.
            if yaml_data is None:
                yaml_data = hp.load_settings_model(temp_dir)
                # with open(temp_dir / 'hparams.yaml', 'r') as file:
                #    yaml_data = yaml.load(file, Loader=yaml.FullLoader)
                # yaml_data['scaler'] = pickle.loads(yaml_data['scaler'])

        config = {
            "scaler": yaml_data[
                "scaler"
            ],  # TODO test how this works without giving scaler etc. outside of jupyterlab
            #'filename' : yaml_data['filename'],
            "filename": str(self.filename),
            "level_name_org": yaml_data["level_name_org"],
            "out_size": yaml_data["out_size"],
            "threshold": yaml_data["threshold"],
            "feature_count": yaml_data["feature_count"],
            "differencing": yaml_data["differencing"],
            "model_architecture": "ensemble",
        }

        ensemble_model = WaVoLightningEnsemble(
            model_list, model_path_list, **model_params
        )
        # The data module needs the largest input window among the base models.
        config["in_size"] = ensemble_model.max_in_size

        data_module = WaVoDataModule(**config)

        logging.info("Params: %s", trial.params)

        my_callback, callbacks = self._get_callbacks(trial)
        logger = TensorBoardLogger(self.log_dir, default_hp_metric=False)

        trainer = pl.Trainer(
            default_root_dir=self.log_dir,
            gradient_clip_val=0.5,
            logger=logger,
            accelerator=accelerator,
            devices=devices,
            callbacks=callbacks,
            max_epochs=max_epochs,
            log_every_n_steps=10,
        )
        trainer.fit(ensemble_model, data_module)

        # save metrics to optuna
        model_path = str(Path(trainer.log_dir).resolve())
        logging.info("model_path: %s", model_path)
        trial.set_user_attr("model_path", model_path)
        for metric in ["hp/val_nse", "hp/val_mae", "hp/val_mae_flood"]:
            for i in [23, 47]:
                trial.set_user_attr(
                    f"{metric}_{i}", my_callback.metrics[metric][i].item()
                )

        return my_callback.metrics[self.monitor].item()
def parse_args() -> argparse.Namespace:
    """Parse all the arguments and provides some help in the command line"""

    parser: argparse.ArgumentParser = argparse.ArgumentParser(
        description="Execute experiments for exp_icaart."
    )
    parser.add_argument(
        "filename", metavar="datafile", type=Path, help="The path to your input data."
    )
    parser.add_argument(
        "modeldir", metavar="modeldir", type=Path, help="The path to your base models."
    )
    parser.add_argument(
        "logdir", type=Path, help="set a directory for logs and model checkpoints."
    )
    # NOTE(review): 'default' has no effect on required positional arguments.
    parser.add_argument(
        "trials",
        metavar="trials",
        type=int,
        default=100,
        help="How many trials to run.",
    )

    parser.add_argument(
        "--expname",
        metavar="experiment_name",
        type=str,
        default="nameless",
        help="The name of the experiment.",
    )
    parser.add_argument(
        "--storagename",
        metavar="storage_name",
        type=str,
        default=None,
        help="The database for the experiment.",
    )

    return parser.parse_args()


def main():
    """Run a grid over ensemble sizes, `trials` repetitions per size."""
    parsed_args = parse_args()
    parsed_args.logdir.mkdir(parents=True, exist_ok=True)

    # The dead `if False:` Hyperband branch was removed; to re-enable pruning use
    # optuna.pruners.HyperbandPruner(min_resource=1, max_resource="auto",
    #                                reduction_factor=3, bootstrap_count=0)
    pruner = optuna.pruners.NopPruner()

    study_name = f"{parsed_args.filename.stem} {parsed_args.expname}"  # Unique identifier of the study.
    storage_name = (
        default_storage_name
        if parsed_args.storagename is None
        else f"{storage_base}{parsed_args.storagename}.db"
    )

    # Logging: file handler for the root and optuna loggers, stdout for optuna.
    logging.basicConfig(level=logging.INFO)
    logFormatter = logging.Formatter(
        "%(asctime)s;%(levelname)s;%(message)s", datefmt="%Y-%m-%d %H:%M:%S"
    )
    fileHandler = logging.FileHandler(
        parsed_args.logdir / "ensemble_hyper.log",
    )
    consoleHandler = logging.StreamHandler(sys.stdout)
    fileHandler.setFormatter(logFormatter)
    consoleHandler.setFormatter(logFormatter)
    logging.getLogger().addHandler(fileHandler)
    optuna.logging.get_logger("optuna").addHandler(fileHandler)
    optuna.logging.get_logger("optuna").addHandler(consoleHandler)

    logging.info(
        "Start of this execution======================================================================"
    )
    logging.info(
        "Executing %s with device %s and parameters %s ",
        sys.argv[0],
        devices,
        sys.argv[1:],
    )

    ens_sizes = [5, 10, 15, 20]
    # The grid values are duplicated `trials` times so GridSampler keeps
    # producing trials instead of stopping after one pass over the unique grid.
    search_space = {"ensemble_size": ens_sizes * parsed_args.trials}
    sampler = optuna.samplers.GridSampler(search_space)

    study = optuna.create_study(
        sampler=sampler,
        study_name=study_name,
        storage=storage_name,
        direction="minimize",
        pruner=pruner,
        load_if_exists=True,
    )
    study.set_metric_names(["hp/val_loss"])

    # Bugfix: gc_after_trial is an argument of Study.optimize, not of
    # Objective.__init__ (where it was silently swallowed by **kwargs).
    objective = Objective(**vars(parsed_args))
    study.optimize(
        objective,
        n_trials=len(ens_sizes) * parsed_args.trials,
        timeout=None,
        gc_after_trial=True,
        callbacks=[lambda study, trial: torch.cuda.empty_cache()],
    )


if __name__ == "__main__":
    main()
def get_configs(passed_args: argparse.Namespace) -> Tuple[dict, List[dict]]:
    """Load main and per-gauge configuration from a single YAML document.

    Args:
        passed_args: parsed CLI arguments; `yaml_path` points at the config
            file and `start`, if given, overrides start/end/range from it.

    Returns:
        Tuple[dict, List[dict]]: (main_config, gauge_configs)
    """
    yaml_path = passed_args.yaml_path
    with open(yaml_path, "r", encoding="utf-8") as file:
        configs = yaml.safe_load(file)

    main_config = configs["main_config"]
    # Normalize zrxp_folder to a list of Paths (a single string is allowed).
    if isinstance(main_config["zrxp_folder"], str):
        main_config["zrxp_folder"] = [main_config["zrxp_folder"]]
    main_config["zrxp_folder"] = list(map(Path, main_config["zrxp_folder"]))
    main_config["sensor_folder"] = Path(main_config["sensor_folder"])
    main_config["start"] = pd.to_datetime(main_config["start"])
    if "end" in main_config:
        main_config["end"] = pd.to_datetime(main_config["end"])
    if passed_args.start is not None:
        # A command-line start disables the configured end date and range mode.
        main_config["start"] = passed_args.start
        main_config.pop("end", None)
        main_config["range"] = False

    gauge_configs = configs["gauge_configs"]
    for gauge_config in gauge_configs:
        gauge_config["model_folder"] = Path(gauge_config["model_folder"])
        # NOTE(review): asserts are stripped under `python -O`; kept to match
        # the existing validation style of this script.
        assert all(isinstance(c, str) for c in gauge_config["columns"])
        assert isinstance(gauge_config["gauge"], str)
        assert all(isinstance(c, str) for c in gauge_config["external_fcst"])

    return main_config, gauge_configs


def _prepare_logging_1() -> None:
    """Basic stdout logging before the database connection exists."""
    logging.basicConfig(level=logging.INFO)
    logging.info("Executing %s with parameters %s ", sys.argv[0], sys.argv[1:])


def _prepare_logging_2(con) -> None:
    """Attach a DB log handler and a record factory carrying a gauge_id.

    Args:
        con: open oracledb connection used by the OracleDBHandler.
    """
    old_factory = logging.getLogRecordFactory()

    def record_factory(*args, **kwargs):
        # Bugfix: pop the custom kwarg BEFORE delegating -- the stock record
        # factory does not accept `gauge_id` and would raise TypeError if it
        # were ever passed through.
        gauge_id = kwargs.pop("gauge_id", None)
        record = old_factory(*args, **kwargs)
        record.gauge_id = gauge_id
        return record

    logging.setLogRecordFactory(record_factory)

    logFormatter = logging.Formatter(
        "%(asctime)s;%(levelname)s;%(message)s", datefmt="%Y-%m-%d %H:%M:%S"
    )
    dbHandler = OracleDBHandler(con)
    dbHandler.setFormatter(logFormatter)
    dbHandler.setLevel(logging.INFO)
    logging.getLogger().addHandler(dbHandler)
    logging.info("Executing %s with parameters %s ", sys.argv[0], sys.argv[1:])
Format: YYYY-MM-DD HH:MM, overwrites start/end/range in the config file.", ) - parser.add_argument("--zrxp", action="store_true", help="Save predictions as ZRXP files") - - # parser.add_argument('-load_sensor',action='store_true',help="Force the") - # load_sensor - # load_zrxp + parser.add_argument( + "--zrxp", action="store_true", help="Save predictions as ZRXP files (not yet implemented)" + ) #TODO: Implement ZRXP return parser.parse_args() @@ -100,23 +110,28 @@ def main(passed_args) -> None: _prepare_logging_1() if "config_dir" in main_config["db_params"]: - logging.info("Initiating Thick mode with executable %s", main_config["db_params"]["config_dir"]) + logging.info( + "Initiating Thick mode with executable %s", + main_config["db_params"]["config_dir"], + ) oracledb.init_oracle_client(lib_dir=main_config["db_params"]["config_dir"]) else: logging.info("Initiating Thin mode") con = oracledb.connect(**main_config["db_params"]) _prepare_logging_2(con) - #This is logged twice into Stout, because if thinks break it would not be logged at all. + # This is logged twice into Stout, because if thinks break it would not be logged at all. 
if "config_dir" in main_config["db_params"]: - logging.info("Initiating Thick mode with executable %s", main_config["db_params"]["config_dir"]) + logging.info( + "Initiating Thick mode with executable %s", + main_config["db_params"]["config_dir"], + ) else: logging.info("Initiating Thin mode") connector = OracleWaVoConnection(con, main_config) - #connector.maybe_create_tables() # Create tables if they don't exist + # connector.maybe_create_tables() # Create tables if they don't exist connector.maybe_update_tables() # Potentially load new data - for gauge_config in gauge_configs: connector.handle_gauge(gauge_config) diff --git a/src/utils/bsh_extractor_v2.py b/src/scripts/bsh_extractor_v2.py similarity index 100% rename from src/utils/bsh_extractor_v2.py rename to src/scripts/bsh_extractor_v2.py diff --git a/src/scripts/icon_export_v3.py b/src/scripts/icon_export_v3.py new file mode 100755 index 0000000000000000000000000000000000000000..f7d765420c58bba913f4dfd76d475236cdfb6c3a --- /dev/null +++ b/src/scripts/icon_export_v3.py @@ -0,0 +1,548 @@ +""" +This module provides tools to extract zrxp files from grib2 ensemble predictions. +""" + +import argparse +import bz2 +import logging +import logging.handlers +import re +import subprocess as sp +import time +from pathlib import Path +from typing import List, Tuple + +import numpy as np +import pandas as pd +import pygrib +from shapely.geometry import Point +from shapely.geometry.polygon import Polygon +from sympy import elliptic_f +import yaml +from io import BytesIO + +logger = logging.getLogger("icon_export") +logger.setLevel(logging.DEBUG) + + +def setup_logging(log_file: Path, debug: bool, max_bytes: int = 50000) -> None: + """Set up rotating log files + + Args: + log_file (Path): Where to log to + debug (bool): Wheather to log debug level messages + max_bytes (int, optional): maximum filesize for the logfile. Defaults to 50000. 
+ """ + log_file.parent.mkdir(parents=True, exist_ok=True) + formatter = logging.Formatter( + fmt="%(asctime)s;%(levelname)s;%(message)s", datefmt="%Y-%m-%d %H:%M:%S" + ) + + logHandlerFile = logging.handlers.RotatingFileHandler( + log_file, maxBytes=max_bytes, backupCount=1 + ) + logHandlerFile.setLevel(logging.DEBUG if debug else logging.INFO) + logHandlerFile.setFormatter(formatter) + logger.addHandler(logHandlerFile) + + logHandlerStream = logging.StreamHandler() + logHandlerStream.setLevel(logging.DEBUG if debug else logging.INFO) + logHandlerStream.setFormatter(formatter) + logger.addHandler(logHandlerStream) + + +def parse_args() -> argparse.Namespace: + """Parse all the arguments and provides some help in the command line""" + parser: argparse.ArgumentParser = argparse.ArgumentParser( + description="Extracts predictions from ICON Ensembles into zrx files" + ) + + parser.add_argument( + "yaml_path", + metavar="yaml_path", + type=Path, + help="""The path to your config file. + Each files is expected to contain: + shape_folder: The path to the directory with your areafiles or to a single area file. + index_folder: The path to the directory where you want to store your index files. + source_folder: The path to the directory with your .grib or .bz2 files. All files should have the same format. e.g. the same coordinates. + target_folder: The path to the directory for your zrx files. + mode: 'ensemble', 'hourly' or 'single' ICON forecast + Hourly means a folder with one .grib2 file per hour with 15 minute messages, not currently in use + force_index_calculation: Force generation of new index files, Defaults fo False + log_file, optional: Where to log information. + tidy_up: Weather to delete the input files after execution + debug: Weather to increase the log level to debug. 
def get_config(passed_args: argparse.Namespace) -> dict:
    """
    Retrieves the configuration from a YAML file and validates the folders.

    Args:
        passed_args (argparse.Namespace): The command-line arguments.

    Returns: dict: The configuration
    """
    with open(passed_args.yaml_path, "r", encoding="utf-8") as file:
        config = yaml.safe_load(file)

    # Convert every folder entry (and the optional log file) to a Path.
    for key in ("shape_folder", "index_folder", "source_folder", "target_folder"):
        config[key] = Path(config[key])
    if "log_file" in config:
        config["log_file"] = Path(config["log_file"])

    assert config["shape_folder"].exists(), f"shape_folder {config['shape_folder']} does not exist"
    assert config["source_folder"].exists(), f"source_folder {config['source_folder']} does not exist"
    assert config["source_folder"].is_dir(), f"source_folder {config['source_folder']} is not a directory"

    return config
+ + Returns: + str: name + """ + return area_path.stem.rsplit("_", maxsplit=1)[-1] + + + + +def get_mask_from_grid(lats: np.array, lons: np.array, area_file: Path) -> np.array: + """Returns a mask that is False everywhere, but at the indexes in the grid that + correspond to points in the polygon made from the area_file. + + Args: + lats (np.array): Latitudes of a grib file. + lons (np.array): Longitudes of a grib file. + area_file (Path): Path to one shape file (Tab seperated values). + + Returns: + np.array: _description_ + """ + logger.info("Generating mask from %s", area_file) + try: + df_area = pd.read_csv(area_file, index_col=0, sep=",", engine="python") + assert all( + pd.api.types.is_numeric_dtype(df_area[col]) for col in ["Lat", "Long"] + ) + except (KeyError, pd.errors.ParserError): + try: + df_area = pd.read_csv( + area_file, index_col=0, sep=None, engine="python", decimal="," + ) + assert all( + pd.api.types.is_numeric_dtype(df_area[col]) for col in ["Lat", "Long"] + ) + except (KeyError, pd.errors.ParserError): + df_area = pd.read_csv(area_file, index_col=0, sep=",") + assert all( + pd.api.types.is_numeric_dtype(df_area[col]) for col in ["Lat", "Long"] + ) + + polygon = Polygon(df_area.values) + + min_index_lat = np.argmax(lats > df_area["Lat"].min(), axis=0)[0] + max_index_lat = np.argmax(lats > df_area["Lat"].max(), axis=0)[0] + min_index_lon = np.argmax(lons > df_area["Long"].min(), axis=1)[0] + max_index_lon = np.argmax(lons > df_area["Long"].max(), axis=1)[0] + + mask = np.full(lats.shape, False) + for i in range(min_index_lat, max_index_lat + 1): + for j in range(min_index_lon, max_index_lon + 1): + mask[i, j] = polygon.contains(Point(lats[i][j], lons[i][j])) + + return mask + +def dict_to_zrx( + forecast_dict: dict, + target_folder: Path, + source_file: Path, + name: str, + member: int = 0, +) -> None: + """Takes a dictionary containing a forecast and saves it into an zrx file. 
+ + Args: + forecast_dict (dict): forecast + target_folder (Path): Where to save the zrx file + source_file (Path): Input grib2 file + name (str): Name of the current area + member (int, optional): Ensemble number. Defaults to 0. + """ + df = pd.DataFrame(forecast_dict).astype({"forecast": "datetime64[ns]"}) + # df['value'] = df['value']-df['value'].shift(1).fillna(df['value'][0]) + df["value"] = df["value"] - df["value"].shift(1).fillna(0) + df["value"] = df["value"].apply(lambda x: max(0, x)) + df = df.round({"value": 3}) + + target_file = target_folder / f"{re.findall('20[2-9][0-9][0-1][0-9][0-3][0-9][0-2][0-9]',source_file.name)[0]}_{name}_{member}.zrx" + if target_file.exists(): + logger.warning("File %s already exists, file will not be saved.", target_file) + else: + with open(target_file, "w", encoding="utf-8") as file: + file.write("#REXCHANGEWISKI." + name + ".N.KNN|*|\n") + file.write("#RINVAL-777|*|\n") + # if mode in ['hourly','single']: + if member == 0: + file.write("#LAYOUT(timestamp,value)|*|\n") + df = df[["forecast", "value"]] + else: + file.write("#LAYOUT(timestamp,forecast, member,value)|*|\n") + + # df[['timestamp','value']] + df.to_csv( + path_or_buf=target_file, + header=False, + index=False, + mode="a", + sep=" ", + date_format="%Y%m%d%H%M", + ) + + +class ICON_Exporter: + """ + Class to extract zrx files from ICON (ensemble) predictions. + Args: + source_folder (Path): directory containing grib2 and/or bz2 files. + target_folder (Path): where to save the zrx files + shape_folder (Path): directory or file with shape files (Tab seperated values). + index_folder (Path): where to save the index files + gen_indexes (bool): generate (new) index files. 
+ mode (str): 'ensemble', 'hourly' or 'single' ICON forecast + Hourly means a folder with one .grib2 file per hour with 15 minute messages, not currently in use + + + """ + def __init__( + self, + source_folder, + target_folder, + shape_folder, + index_folder, + gen_indexes, + mode, + tidy_up, + ): + self.source_folder = source_folder + self.target_folder = target_folder + self.shape_folder = shape_folder + self.index_folder = index_folder + self.gen_indexes = gen_indexes + self.mode = mode + self.tidy_up=tidy_up + self.indexes = None + self.names = None + + + if self.mode not in ["hourly", "single", "ensemble"]: + raise ValueError( + f"The value {mode} is not a valid mode. Must be hourly, single or ensemble." + ) + + def setup(self): + self.index_folder.mkdir(parents=True, exist_ok=True) + self.target_folder.mkdir(parents=True, exist_ok=True) + + def tear_down(self): + """Deletes old .grib2 and .bz2 files + """ + if self.tidy_up: + for data_file in filter(lambda x: x.suffix in [".grib2", ".bz2"], self.source_folder.iterdir()): + logger.info("Deleting file %s", data_file) + data_file.unlink() + + + def unpack_bz(self) -> None: + """Unpacks all files ending with .bz2 in a given folder or subfolder and saves them with the same name, just without the .bz2 + This should not really be necessary, but for some reason the pygrib library cannot handle the unzipped and loaded data + """ + #TODO Once pygrib 2.1.5 is released this wont be necessary anymore. 
+ for source_file in filter( + lambda x: x.suffix == ".bz2" + and not x.with_suffix(".grib2").exists() + and not (x.parent / x.stem).exists(), + self.source_folder.iterdir(), + ): + if source_file.suffixes == [".grib2", ".bz2"]: + unpacked_source_file = source_file.parent / source_file.stem + else: + unpacked_source_file = source_file.with_suffix(".grib2") + logger.info("Unpacking file %s", source_file) + bz2_file = bz2.BZ2File(source_file) + bz2_data = bz2_file.read() + with open(unpacked_source_file, "wb") as file: + file.write(bz2_data) + + def set_indexes(self): + if self.names is None: + if (self.gen_indexes + or len(list(filter(lambda x: x.suffix == ".index", self.index_folder.iterdir()))) == 0 + ): + + #Select the smallest file + try: + grib_file = sorted( + filter(lambda x: x.suffix == ".grib2", self.source_folder.iterdir()), + key=lambda x: x.stat().st_size, + )[0] + except IndexError: + raise FileNotFoundError("No .grib2 files found in source folder") + lats, lons = pygrib.open(str(grib_file))[1].latlons() # pylint: disable=no-member + logger.info( + "Generating index files from %s, grid shape is %s", + grib_file.name, + lats.shape, + ) + + # because not all ICON files use the same grid, but we have no way of knowing which one until we load a message. 
+ shape_files = list(filter(lambda x: x.suffix in [".txt", ".csv"], self.shape_folder.iterdir())) + index_files = [self.index_folder / shape_file.with_suffix(".index").name for shape_file in shape_files] + self.indexes = list(map(lambda x: np.where(get_mask_from_grid(lats, lons, x).flatten())[0]+ 1,shape_files)) + self.names = list(map(get_area_name, shape_files)) + + for name, index, index_file in zip(self.names, self.indexes, index_files): + if index_file.exists(): + logger.info("File %s already exists, replacing it.", index_file) + pd.DataFrame(index, columns=[name]).to_csv(index_file) + + else: + index_files = list(filter(lambda x: x.suffix == ".index", self.index_folder.iterdir())) + logger.info("Loading index files %s", index_files) + self.indexes = [pd.read_csv(index_file, index_col=0).values.flatten() for index_file in index_files] + self.names = list(map(get_area_name, index_files)) + + + def process(self) -> None: + if self.mode == "ensemble": + self.process_ensembles() + elif self.mode == "single": + self.process_singles() + elif self.mode == "hourly": + self.process_hourly() + + def process_singles(self) -> None: + self.set_indexes() + + for source_file in filter(lambda x: x.suffix == ".grib2", self.source_folder.iterdir()): + logger.info("Processing single forecast for file %s", source_file) + + gribfile = pygrib.open(str(source_file)) # pylint: disable=no-member + dict_dict = {} + first = True + + for message in gribfile.select(shortName=["tp"]): + start_time = pd.to_datetime(str(message.dataDate) + str(message.dataTime), format="%Y%m%d%H") + for index, name in zip(self.indexes, self.names): + if first: + dict_dict[name] = { + "timestamp": 48 * [start_time], + "forecast": pd.period_range( + start=start_time + pd.offsets.Hour(1), periods=48, freq="h" + ), + "member": 48 * [0], + "value": [], + } + else: + if isinstance(message.values, np.ma.core.MaskedArray): + dict_dict[name]["value"].append( + message.values.data.flatten()[index - 1].mean() + ) + 
else: + dict_dict[name]["value"].append( + message.values.flatten()[index - 1].mean() + ) + first = False + + for name, cur_dict in dict_dict.items(): + dict_to_zrx(cur_dict, self.target_folder, source_file, name, member=0) + + + + def process_ensembles(self) -> None: + """Helper function that splits the input ensemble file based on ensemble number, gets names and indexes, + and then calls extract_ensemble on all .grib2 files + """ + + # Iterate over all potential input files + tmp_folder = self.source_folder / "tmp" + tmp_folder.mkdir(parents=True, exist_ok=True) + for source_file in filter(lambda x: x.suffix == ".grib2", self.source_folder.iterdir()): + # split file + output_name = tmp_folder / source_file.name.replace( + ".grib2", "_[perturbationNumber].grib2" + ) # The pertubationNumber is not a Python thing, it's for the grib_copy command + logger.info("grip_copy %s into %s", source_file, output_name) + #You may need to install the eccodes library: https://confluence.ecmwf.int/display/ECC/ecCodes+installation#ecCodesinstallation-Quickinstallationguide + #Or just sudo apt install libeccodes-tools + sp.run(["grib_copy", source_file, output_name], capture_output=True, check=True) + self.set_indexes() + source_base = str(tmp_folder / (source_file.stem + "_")) + self.extract_ensemble(source_base) + for temp_file in tmp_folder.iterdir(): + temp_file.unlink() + tmp_folder.rmdir() + + def extract_ensemble(self,source_base: str) -> None: + """Extracts the predicted mean rainfall per hour for each ensemble member and area/catchment and exports them into zrx files. 
+ + Args: + source_base (str): the name base of the grib2 and zrx files + """ + logger.info("Processing ensemble forecast for files %s", source_base + "*.grib2") + for i in range(1, 21): + logger.info("Ensemble member %s/20", i) + source_file = Path(source_base + str(i) + ".grib2") + gribfile_small = pygrib.open(str(source_file)) # pylint: disable=no-member + # doesn't work with 00:00: start_time = pd.to_datetime(str(gribfile_small[1].dataDate) +str(gribfile_small[1].dataTime),format="%Y%m%d%H%M") + start_time = pd.to_datetime( + f"{gribfile_small[1].dataDate} {gribfile_small[1].dataTime:04}", + format="%Y%m%d %H%M", + ) + dict_dict = {} + for name in self.names: + dict_dict[name] = { + "timestamp": 48 * [start_time], + "forecast": pd.period_range( + start=start_time + pd.offsets.Hour(1), periods=48, freq="h"), + "member": 48 * [i], + "value": [], + } + + for message in gribfile_small: + for name, index in zip(self.names, self.indexes): + dict_dict[name]["value"].append(message.values.flatten()[index - 1].mean()) + + for name, cur_dict in dict_dict.items(): + dict_to_zrx(cur_dict, self.target_folder, source_file, name, member=i) + + #Remove the grib2 file created by grib_copy + source_file.unlink() + + + def process_hourly(self) -> None: + """Extracts the predicted mean rainfall per hour for each area/catchment and exports them into zrx files. 
+ """ + print("WARNING: UNTESTED CODE") + self.set_indexes() + + logger.info("Processing hourly forecast for folder %s", self.source_folder) + # get all current startimes from the file names + date_set = { + re.findall("20[2-9][0-9][0-1][0-9][0-3][0-9][0-2][0-9]", x.name)[0] + for x in filter(lambda x: x.suffix == ".grib2", self.source_folder.iterdir()) + } + + for icon_run_date in date_set: + dict_dict = {} + for i, source_file in enumerate( + sorted( + x + for x in self.source_folder.iterdir() + if icon_run_date in x.name and x.suffix == ".grib2" + ) + ): + assert source_file.name.endswith( + f"{icon_run_date}_{i:03d}_2d_tot_prec.grib2" + ) + if i == 0: + pass # The first file has messages vor 0, 0-15,0-30, 0-45 minutes, which we don't care about. + else: + gribfile = pygrib.open(str(source_file)) # pylint: disable=no-member + if i == 1: + start_time = pd.to_datetime( + str(gribfile[1].dataDate) + str(gribfile[1].dataTime), + format="%Y%m%d%H%M", + ) + for name, index in zip(self.names, self.indexes): + if i == 1: + dict_dict[name] = { + "timestamp": 48 * [start_time], + "forecast": pd.period_range( + start=start_time + pd.offsets.Hour(1), + periods=48, + freq="H", + ), + "member": 48 * [0], + "value": [], + } + + dict_dict[name]["value"].append( + gribfile[1].values.flatten()[index - 1].mean() + ) + + logger.debug("Message: %s", gribfile[1]) + logger.debug( + "Grid shape is %s ,does this match the index files?", + gribfile[1].values.shape, + ) + + source_file = [ + x + for x in self.source_folder.iterdir() + if icon_run_date in x.name and x.suffix == ".grib2" + ][0] + for name, cur_dict in dict_dict.items(): + dict_to_zrx(cur_dict, self.target_folder, source_file, name, member=0) + +def main() -> None: + """_summary_""" + + start = time.time() + parsed_args = parse_args() # configs/icon_exp_ensemble.yaml + config = get_config(parsed_args) + + setup_logging(config["log_file"], config["debug"]) + logger.info( + "Start of this 
execution======================================================================" + ) + + exporter = ICON_Exporter( + config["source_folder"], + config["target_folder"], + config["shape_folder"], + config["index_folder"], + config["force_index_calculation"], + config["mode"], + config["tidy_up"], + ) + exporter.setup() + exporter.unpack_bz() + exporter.process() + exporter.tear_down() + + end = time.time() + logger.info("Excecution time %s s", end - start) + logger.info( + "End of this execution======================================================================" + ) + + +if __name__ == "__main__": + main() diff --git a/src/predict.py b/src/scripts/predict.py similarity index 100% rename from src/predict.py rename to src/scripts/predict.py diff --git a/src/rl_ensemble.py b/src/scripts/rl_ensemble.py similarity index 100% rename from src/rl_ensemble.py rename to src/scripts/rl_ensemble.py diff --git a/src/rl_hyper.py b/src/scripts/rl_hyper.py similarity index 100% rename from src/rl_hyper.py rename to src/scripts/rl_hyper.py diff --git a/src/callbacks.py b/src/utils/callbacks.py similarity index 100% rename from src/callbacks.py rename to src/utils/callbacks.py diff --git a/src/utils/db_tools.py b/src/utils/db_tools.py index 5fe390ae27afc1996822021bc84166ae4c44d3d2..65af2e2533853a74b1b2acd76919407d62b38eea 100644 --- a/src/utils/db_tools.py +++ b/src/utils/db_tools.py @@ -17,6 +17,7 @@ from sqlalchemy import create_engine from sqlalchemy import select, bindparam, between, update, insert from sqlalchemy.orm import Session from sqlalchemy.dialects.oracle import FLOAT, TIMESTAMP, VARCHAR2, NUMBER +from sqlalchemy.exc import IntegrityError import torch from torch.utils.data import Dataset, DataLoader import lightning.pytorch as pl @@ -31,6 +32,7 @@ from utils.orm_classes import ( PegelForecasts, Log, Sensor, + InputForecastsMeta ) # pylint: disable=unsupported-assignment-operation @@ -47,6 +49,7 @@ class OracleWaVoConnection: self.members = self.get_member_list() 
self.times = self.get_times() + self.not_found = [] def handle_gauge(self, gauge_config: dict) -> None: """ @@ -60,69 +63,53 @@ class OracleWaVoConnection: Returns: None """ - # self.cur_config = gauge_config - for model_folder in gauge_config["model_folder"]: # self.cur_model_folder = model_folder - self.handle_model(gauge_config, model_folder) - - def handle_model(self, gauge_config, model_folder) -> None: - """ - Handles the model by loading it, setting it to evaluation mode, and making predictions for each time step and ensemble member. - - Args: - gauge_config (dict): The configuration for the gauge. - model_folder (Path): The folder containing the model files. - - Returns: - None - """ - model = hp.load_model(model_folder) + + model = hp.load_model(gauge_config["model_folder"]) model.eval() - model_name = model_folder.name created = datetime.now() for end_time in tqdm(self.times): - # load sensor sensor input data df_base_input = self.load_input_db( in_size=model.in_size, end_time=end_time, - columns=gauge_config["columns"], - external_fcst=gauge_config["external_fcst"], + gauge_config=gauge_config, + created=created, ) if df_base_input is None: - y = torch.Tensor(48).fill_(np.nan) - for member in self.members: - self.insert_forecast( - y, - gauge_config["document"], - model_name, - end_time, - member, - created, - ) continue # predict + # Get all external forecasts for this gauge + #stmt = select(InputForecastsMeta).where(InputForecastsMeta.sensor_name.in_(bindparam("external_fcst"))) + #params = {"external_fcst" : gauge_config["external_fcst"]} + #with Session(self.engine) as session: + # x = session.scalars(statement=stmt, params=params).fetchall() + # sensor_names = [el.sensor_name for el in x] + #params2 = {"ext_forecasts" : sensor_names, "tstamp" : end_time} + stmst2 = select(InputForecasts).where(InputForecasts.sensor_name.in_(bindparam("ext_forecasts")),InputForecasts.tstamp == (bindparam("tstamp"))) + params2 = {"ext_forecasts" : 
gauge_config["external_fcst"], "tstamp" : end_time} + df_temp = pd.read_sql(sql=stmst2,con=self.engine,index_col="tstamp",params=params2) + for member in self.members: self.handle_member( gauge_config, model, - model_name, df_base_input, member, end_time, created, ) + def handle_member( self, gauge_config, model, - model_name, df_base_input: pd.DataFrame, - member, + member: int, end_time, created: pd.Timestamp = None, ) -> None: @@ -133,10 +120,10 @@ class OracleWaVoConnection: Args: gauge_config (dict): The configuration for the gauge. model (LightningModule): The model to use for prediction. - model_name (str): The name of the model (basically name of folder containing the model files). df_base_input (DataFrame): The base input DataFrame with measure values, still need the actual precipitation forecast. member (int): The member identifier. end_time (pd.Timestamp): The timestamp of the forecast. + created (pd.Timestamp): The timestamp of the start of the forecast creation. Returns: None @@ -144,31 +131,34 @@ class OracleWaVoConnection: if member == -1: df_input = df_base_input.fillna(0) else: + + # replace fake forecast with external forecast - df_input = self.merge_synth_fcst( - df_base_input, member, end_time, gauge_config["external_fcst"] - ) + df_input = self.merge_synth_fcst(df_base_input, member, end_time, gauge_config,created) if df_input is None: return y = pred_single_db(model, df_input) self.insert_forecast( - y, gauge_config["document"], model_name, end_time, member, created + y, gauge_config["gauge"], gauge_config["model_folder"].name, end_time, member, created ) - def load_input_db(self, in_size, end_time, columns, external_fcst) -> pd.DataFrame: + def load_input_db(self, in_size, end_time, gauge_config,created) -> pd.DataFrame: """ Loads input data from the database based on the current configuration. Args: in_size (int): The input size in hours. end_time (pd.Timestamp): The end time of the input data. - columns (List[str]): The columns to load. 
- external_fcst (Dict[str,str]): The external forecasts to load/shift. + gauge_config (dict): The configuration for the gauge, important here are the columns and external forecasts. + created (pd.Timestamp): The timestamp of the start of the forecast creation. Returns: pd.DataFrame: The loaded input data as a pandas DataFrame. """ + + columns=gauge_config["columns"] + external_fcst=gauge_config["external_fcst"] start_time = end_time - pd.Timedelta(in_size - 1, "hours") stmt = select(SensorData).where( @@ -177,7 +167,6 @@ class OracleWaVoConnection: ) df_input = None try: - df_main = pd.read_sql( sql=stmt, con=self.engine, @@ -196,23 +185,18 @@ class OracleWaVoConnection: df_rest.index = df_rest.index - pd.Timedelta("48h") if len(df_input) != in_size: - raise LookupError( + raise MissingTimestampsError( f"Some timestamps are completely missing, found {len(df_input)}/{in_size} hours from {start_time} to {end_time}" ) for col in df_input.columns: if df_input[col].isna().sum() > 0: - logging.warning( - "Missing %s values in %s, attempting interpolation", - df_input[col].isna().sum(), - col, - ) + logging.warning("Missing %s values in %s, attempting interpolation",df_input[col].isna().sum(),col,extra={"gauge":gauge_config["gauge"]}) # Interpolation # Precipitation is filled with 0, other values are interpolated precip_cols = df_input.columns[df_input.columns.str.contains("Precip")] df_input.loc[:, precip_cols].fillna(0, inplace=True) - df_input = df_input.interpolate( limit=self.main_config["max_missing"], limit_direction="both" ) @@ -220,17 +204,12 @@ class OracleWaVoConnection: if df_input.isna().sum().sum() > 0: for col in df_input.columns: if df_input[col].isna().sum() > 0: - logging.warning( - "Missing %s values in %s after interpolation", - df_input[col].isna().sum(), - col, - ) - raise LookupError( + logging.warning("Missing %s values in %s after interpolation",df_input[col].isna().sum(),col,extra={"gauge":gauge_config["gauge"]}) + raise MissingValuesError( 
f"Missing {df_input.isna().sum().sum()} values after interpolation" ) # Shift columns that contain external forecasts by 48 hours - for col in external_fcst: df_input[col] = df_input[col].shift(-48) # Replace the values beyond that cutoff that we actually do know. @@ -238,14 +217,46 @@ class OracleWaVoConnection: # This is a little messy because we don't want to interpolate those values and need to be careful about missing timestamps df_input.loc[df_rest.index, col] = df_rest[col] - except LookupError as e: - if df_input is not None: - logging.error( - "No data for the chosen timeperiod up to %s and columns in the database.", - end_time, - ) - df_input = None + except MissingTimestampsError as e: + logging.error(e.args[0],extra={"gauge":gauge_config["gauge"]}) + df_input = None + except MissingValuesError as e: + logging.error(e.args[0],extra={"gauge":gauge_config["gauge"]}) + df_input = None + except KeyError as e: + logging.error( + "Some columns are missing in the database, found %s", + df_main.columns,extra={"gauge":gauge_config["gauge"]}) logging.error(e.args[0]) + df_input = None + #except LookupError as e: + # if df_input is not None: + # logging.error( + # "No data for the chosen timeperiod up to %s and columns in the database.", + # end_time, + # extra={"gauge":gauge_config["gauge"]} + # ) + # df_input = None + # logging.error(e.args[0]) + + + if df_input is None: + logging.error("Input sensordata could not be loaded, inserting empty forecasts",extra={"gauge":gauge_config["gauge"]}) + y = torch.Tensor(48).fill_(np.nan) + for member in self.members: + try: + self.insert_forecast( + y, + gauge_config["gauge"], + gauge_config["model_folder"].name, + end_time, + member, + created, + ) + except IntegrityError as e: + logging.error( "Sensorname %s is not a valid value in PEGEL_FORECASTS. 
%s",gauge_config["gauge"],e.args[0]) + break # if one member fails, don't try to insert the rest + return df_input @@ -254,7 +265,8 @@ class OracleWaVoConnection: df_base_input: pd.DataFrame, member: int, end_time: pd.Timestamp, - external_fcst: dict, + gauge_config: dict, + created: pd.Timestamp = None, ) -> pd.DataFrame: """ Merges external forecasts into the base input dataframe by replacing the old wrong values. @@ -263,24 +275,29 @@ class OracleWaVoConnection: df_base_input (pd.DataFrame): The base input dataframe. member (int): The member identifier. end_time (pd.Timestamp): The timestamp of the forecast. - external_fcst (dict): The external forecasts to merge. + gauge_config (dict): Settings for the gauge, especially the external forecasts and the gauge name. + created (pd.Timestamp): The timestamp of the start of the forecast creation. Returns: pd.DataFrame: The merged dataframe with external forecasts. """ + #TODO: besprechen ob wir einfach alle spalten durch vorhersagen ergänzen falls vorhanden? df_input = df_base_input.copy() - for col, fcst_name in external_fcst.items(): + + for col in gauge_config["external_fcst"]: try: - ext_fcst = self.get_ext_forecast(fcst_name, member, end_time) + ext_fcst = self.get_ext_forecast(col, member, end_time) except AttributeError: logging.error( "External forecast %s time %s ensemble %s is missing", - fcst_name, + col, end_time, member, + extra={"gauge":gauge_config["gauge"]} ) # if some external forecasts are missing, insert NaNs and skip prediction - # self.insert_forecast(torch.Tensor(48).fill_(np.nan), self.cur_member) + y = torch.Tensor(48).fill_(np.nan) + self.insert_forecast(y, gauge_config["gauge"], gauge_config["model_folder"].name, end_time, member, created) return None else: # Replace missing values with forecasts. 
@@ -292,9 +309,6 @@ class OracleWaVoConnection: df_input.iloc[nan_indices2, df_input.columns.get_loc(col)] = np.array( ext_fcst )[nan_indices] - # df_input.iloc[nan_indices, df_input.columns.get_loc(col)] = np.array( - # ext_fcst - # )[nan_indices] return df_input @@ -311,6 +325,9 @@ class OracleWaVoConnection: continue vhs_gebiet = zrxp_file.name.split("_")[1] # [6:] + if vhs_gebiet in self.not_found: + continue + df_zrxp = pd.read_csv( zrxp_file, skiprows=3, header=None, sep=" ", parse_dates=[0] ) @@ -352,12 +369,12 @@ class OracleWaVoConnection: member, ) - def get_ext_forecast(self, vhs_gebiet: str, member: int, end_time) -> List[int]: + def get_ext_forecast(self, sensor_name: str, member: int, end_time) -> List[int]: """ Retrieves the external forecast data from the database. Args: - vhs_gebiet (str): The name of the external forecast. + sensor_name (str): The name of the external forecast. member (int): The member identifier. end_time (pd.Timestamp): The timestamp of the forecast. @@ -365,26 +382,34 @@ class OracleWaVoConnection: List(int): The external forecast data. """ # TODO check new column in MODELL_SENSOR - if vhs_gebiet.startswith("Det_"): - member = 0 - - sensor = self.get_sensor_name(vhs_gebiet) # maybe check for none? - if sensor is None: - raise AttributeError(f"Sensor {vhs_gebiet} not found in database") - stmt = select(InputForecasts).where( + #TODO this would be more efficient outside the member loop + #TODO error handling + stmt = select(InputForecastsMeta).where(InputForecastsMeta.sensor_name == (bindparam("sensor_name"))) + params = {"sensor_name" : sensor_name} + with Session(self.engine) as session: + input_meta = session.scalar(statement=stmt, params=params) + + if input_meta is None: + raise AttributeError(f"Forecast {sensor_name} not found in Table InputForecastsMeta") + + #sensor = self.get_sensor_name(vhs_gebiet) # maybe check for none? 
+ #if sensor is None: + # raise AttributeError(f"Sensor {vhs_gebiet} not found in database") + + stmt2 = select(InputForecasts).where( InputForecasts.tstamp == bindparam("tstamp"), InputForecasts.sensor_name == bindparam("sensor_name"), InputForecasts.member == bindparam("member"), ) params = { "tstamp": end_time, - "sensor_name": sensor.sensor_name, + "sensor_name": input_meta.sensor_name, "member": member, } with Session(self.engine) as session: - input_forecast = session.scalar(statement=stmt, params=params) + input_forecast = session.scalar(statement=stmt2, params=params) input_forecast = [input_forecast.__dict__[f"h{i}"] for i in range(1, 49)] return input_forecast @@ -506,8 +531,10 @@ class OracleWaVoConnection: **fcst_values, ) session.add(pegel_forecast) + #try: session.commit() + def maybe_update_tables(self) -> None: """ Updates the database tables if necessary based on the configuration settings. @@ -652,6 +679,8 @@ class OracleWaVoConnection: Returns: Sensor: A Sensor object (a row from the table Sensor). """ + + with Session(bind=self.engine) as session: # get all sensor names for the vhs_gebiet from the model_sensor table and check if there is only one. 
stmt = select(ModellSensor.sensor_name).where( @@ -662,6 +691,8 @@ class OracleWaVoConnection: ) if len(sensor_names) == 0: logging.warning("No sensor_name found for %s", vhs_gebiet) + self.not_found.append(vhs_gebiet) + return elif len(sensor_names) > 1: logging.warning("Multiple sensor_names found for %s", vhs_gebiet) @@ -859,13 +890,20 @@ class OracleDBHandler(logging.Handler): record.exc_text = "" log = Log( - created=pd.to_datetime(record.asctime), + created=pd.to_datetime(record.created,unit="s"), loglevelname=record.levelname, message=record.message, module=record.module, funcname=record.funcName, lineno=record.lineno, exception=record.exc_text, + gauge=record.gauge_id, ) self.session.add(log) self.session.commit() + +class MissingTimestampsError(LookupError): + pass + +class MissingValuesError(LookupError): + pass diff --git a/src/utils/orm_classes.py b/src/utils/orm_classes.py index 71e0288a7387f655e2091dc1dc63b21ffa1ebd5a..a80a75f680486c6657a02544e5bf27309cef5722 100644 --- a/src/utils/orm_classes.py +++ b/src/utils/orm_classes.py @@ -2,27 +2,16 @@ The alternative is to use table reflection from sqlalchemy. 
""" from typing import List, Optional +import datetime from sqlalchemy import Double, ForeignKeyConstraint, Index, Integer, PrimaryKeyConstraint, TIMESTAMP, VARCHAR,Identity from sqlalchemy.dialects.oracle import NUMBER from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship -import datetime class Base(DeclarativeBase): pass - -class ExampleTable(Base): - __tablename__ = 'example_table' - __table_args__ = ( - PrimaryKeyConstraint('tstamp', 'sensor_name', name='sys_c008698'), - ) - - tstamp: Mapped[datetime.datetime] = mapped_column(TIMESTAMP, primary_key=True) - sensor_name: Mapped[str] = mapped_column(VARCHAR(256), primary_key=True) - sensor_value: Mapped[Optional[float]] = mapped_column(Double) - class Log(Base): __tablename__ = 'log' __table_args__ = ( @@ -37,6 +26,19 @@ class Log(Base): funcname: Mapped[Optional[str]] = mapped_column(VARCHAR(64)) lineno: Mapped[Optional[int]] = mapped_column(Integer) exception: Mapped[Optional[str]] = mapped_column(VARCHAR(256)) + gauge: Mapped[Optional[str]] = mapped_column(VARCHAR(256)) + + +class InputForecastsMeta(Base): + __tablename__ = 'input_forecasts_meta' + __table_args__ = ( + PrimaryKeyConstraint('sensor_name', 'vhs_gebiet', name='UNIQUE_EXT_FORECASTS'), + #Index('sensordata_name', 'sensor_name'), + #Index('sensordata_ts', 'tstamp') + ) + sensor_name : Mapped[str] = mapped_column(VARCHAR(256), primary_key=True) + vhs_gebiet : Mapped[str] = mapped_column(VARCHAR(256), primary_key=True) + ensemble_members : Mapped[int] = mapped_column(NUMBER(5, 0, False)) class Modell(Base):