From 104b221e55f50ec95f77c36d57fc3ae42c5105a6 Mon Sep 17 00:00:00 2001
From: Hoa Ben The Nguyen <hbnguye@stud.ntnu.no>
Date: Thu, 21 Mar 2024 12:42:59 +0100
Subject: [PATCH] update: version

---
 server/lake_relations/added_lakes.txt         |   1 +
 .../__pycache__/add_new_lake.cpython-311.pyc  | Bin 0 -> 5838 bytes
 .../map/__pycache__/get_lake.cpython-311.pyc  | Bin 0 -> 4621 bytes
 .../get_measurements.cpython-311.pyc          | Bin 0 -> 6039 bytes
 server/map/add_new_lake.py                    | 144 ++++++++++++++
 server/map/get_lake.py                        | 110 +++++++++++
 server/map/get_measurements.py                | 178 ++++++++++++++++++
 7 files changed, 433 insertions(+)
 create mode 100644 server/lake_relations/added_lakes.txt
 create mode 100644 server/map/__pycache__/add_new_lake.cpython-311.pyc
 create mode 100644 server/map/__pycache__/get_lake.cpython-311.pyc
 create mode 100644 server/map/__pycache__/get_measurements.cpython-311.pyc
 create mode 100644 server/map/add_new_lake.py
 create mode 100644 server/map/get_lake.py
 create mode 100644 server/map/get_measurements.py

diff --git a/server/lake_relations/added_lakes.txt b/server/lake_relations/added_lakes.txt
new file mode 100644
index 00000000..b10ac55a
--- /dev/null
+++ b/server/lake_relations/added_lakes.txt
@@ -0,0 +1 @@
+mjosa
\ No newline at end of file
diff --git a/server/map/__pycache__/add_new_lake.cpython-311.pyc b/server/map/__pycache__/add_new_lake.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7b81c96389419f16b137c0fae8b00356a829eb42
GIT binary patch
literal 5838
zcmbtYO>7&-6`mz`$>sl267@&hqGd}qRa^dpB3bnhWcf$2RVT6H23C|1G<PLarbv2s
z8A;3%U<;#V45L<x1YrRe;305TxGvm70UdfsdP#cQ6}A<zfKauC555#E+yaKtL*EQZ
zk(8{WK!?k>Z|1#u^Y_i0H~R~h%Z?!Zsq^1LQ!_$;!<lNbRG1eFz<hyt#8WXeNx|C`
zGtHSN&2#i5J;zKk6qcD|mO1OBb&j26=WLTUQbxt>lXg<iG3nrG!8vW>nY;9)i?;}7
z;H|*BdG;=v^zb&p$=l)Y5V&az?*xgLrzTL)^&EHOYS3(0-%i99XA*J4J|2w=6S5eM
z&lsNXrDAflRsedo7>MTtaYis4sd!ji45i{xTrF9yA59B}>t=#q3?-&R^I=&KBc{qw
z@yiN-GyGrtoyNnKDO^}ivzt6mJp%smW<Fz5;K!Rfs;X#q(<X1eP57EDW9I3*NHN#z
zpGS|N?&Hn+8Cu3YsU(GNY>#JdleR^VY^&-Z#9JD*en`!r48vP97Wi8g`aJsJCmEJ!
zA#clAWk;>1!YUTU`UuAT7>}K#j7_l>svxKqlDERnZuYIQMGDE@nyO-JlxA!)fUKHc
zBlT5f2+3_VeyiqIs|r(a*0gJ!?62`KLfa!)hsT@i(rAT3_aE30%$FVJ%AqhYSAWhp
z73ZwGYGVrUka1<)8Bd0jyQ-xKDIUd@hHb>#8|^pfaEkkNI@6|%7usx+_tx2S%H6eG
zqfIdcJ!+~-XE$dPdOMnFZfLQYx7HB%U;Ea~GdpWS4_lO`9dOXHZ4S@N`>MTznQ3a2
zXMDUv4%W#XjdI1yJ0F@9-|Qw1c-KR7rd6@;(0*%UeF`ZKo=Ll4ogDW!c(>xfs8;N}
z=OL46lMmNg<~iQ0v^C-#YNJ9O>gu$&jDu*l*W@*hZ{}MXSB&V^rnjcLWxr#++0LrC
zP>0-V8SIIZHGXz?m4|UO_@cs|s-*tS`x><xbJ$&jvu(Ya-X{s-M}l}H7QQWnL?ITI
z!FNhW=4KO8xPL}S%u3)&SzzV_SzHvcw}@=ILC7%h$FO|y29PgcLn-Kk)(lneMir{&
zNE^HHsp)>(1o|ZknuuY86&AofM-yVuWSIJo8rF&nm+&SI1YVsS9iF-l9hRozay&J4
zUXX6fiR4sLOw1fVdGzR1c`Q?N;p9}9=R<K}K7_~BpIm%pKNX8gawIXAJd^fEQgR5C
z`cK6Yk#I~p158y2U_uhcbQY<eYvpHo=5uS_s<$81xS-AjH_Kmu8hVarBYo^*I2sf9
zfSd?K5^<PNIdFO6t*e2=%~>HL<Kl^1;iM2-3{*}vNyKBC`MBlRm;RFe@@$3q?Cf*!
zI>@kI6vA>!6ks0gQtD=ikA4)2@`k(0M}#;`6dZoEyqFZ=s1p-OXfrBEBKGozw^DyJ
z5sN|lu%tmo#HcjvGW0VPNu=U(g`bb|@-4$b%5MqLnOm~KPM4dP(!STi!(a$Lo_`A#
z)6UTZbS5qz#$8Es;bbxvjSv_e!Qe?VDS7(vP_WA|&m?)nCJJFbG!30FY#3D`G5~W-
zh#R&G3lSlSJ8L*1pe75U84*Tp0h2<UH`trdYaR}G5uZjPKB^2FFPz9?)`GzUYOS!y
zHz6b-l|%DU`Bn(S$gq&=lEH}BjA4&T(YPds;}L;?PsEPjutX9Gk(UfJST($%5OgsV
z#dAF!jtHTU(KIhcp<j4lAvsYVhf9KHVed#u*f|h{knAlOrXU&)EGqNR@;n(gLu?o{
z*;)*rnoCO1`FJG33&D1SmW0@}h);R4?LvH54jazOMkwdl>b5W(0E{Rv@EFa8JtD+n
zFkrHrgfB0r$YEm)4wEDiA7mo-dH@}~2+UkG9$F~B7U30MAUSx!jFn+#G?N*FR><T;
z<FGQ4cof@kH%KJIS<MsMPJ*clmW-wf16Hb_l(16`?2tqpSs2#JUKQVh9G(^F8`wBm
z^vu(AKcntGr+G$o&uDh6*zC`b6(;pvNA+F9E78?mm-Obb?0Avv;gwxq&FUxLfsLzq
zuIZj@*|BF<dv53lnY)=hwQB89tsU#`HnshV<{sDG<0?D8(bST?QuH=uFBhAdvsX5n
z{Uu~)MyWE*S&P1ooW0oEnR7hj{P|0T$)|peJFRo4b4-zSJ#gN0F1@GwdNj6IXM0t)
zcOA4YXk53>b?2CMt|Q;0aRHqR;7rTslLbce_3FM}jXR)o2Xf2?+q}khuCkqf^dEbA
zU}Zq_kLdmpjXkHc=T!2PXfMN-dJ)%@9or!N<e-afOgkYtx@1AUeP6z>vCv*FwIt-P
zuX|gUQu)-UnL=3e9@M=DpY%LsR+!(oR_<thBYNM6>K(~hpkCKrmG!SXd`tY#FKLeb
zx?_I{(YB#t=bnd$_0IkhGP%A>$dYBf`@phg`OdFwPmgKc1A6zsZ&NGZ|7zrSH@>B`
z!OQyKWp(eBC3lhAx5o9Ya(&BZG;UbuhE?*c`@7YBZ)pBO-9MNchlYK7p6%IN==)jX
zVWQZ5u*8`C?h?X;MIAkQ`x_;fwcTDqKyuC!hiu*lo_n6WQ)7E{wnt@qifq$T_!Cdg
zp0k5B@BU(I06%Tr#g<(qho#d}LO@iy^<Nxf+SIoGr&jgk6^$O(>2Z}FFVgH9-MUJ*
zE=d~g*J-~>`$<{LD&4a5q1rR3(L*{tq|!s1#TwnJ)2(1~gJ$nuUZb(<=kI<tsRoW~
zz7x9d#F}q-)i=B{sQE^9->62P*Xi?W^%3#OY0!8(M>gYdD8h#-&cEn{!{rMcDL|&2
zGG@h0B2zlUWGopgM3bz-;MiTUR_+J`a-A)OR&3KIh>L*Em>Q4Yj9p<1_!!!j6nmp4
zL@PGML?W%amYxnMG{nvjb-lJ_h^nDwhhqIdTXrZ^ImWBC1TC`=I8bm;6mgg*;!{&R
z2qfqf-^Tn1Cg(6A@(Y-|h{+{P#()@he7b~)TQsOee3ZlmgIbXA-J;$n5l18*!1XW*
z!(aLXkSyA8dGEiUy|}UWfJX1hUdRnB_2hj;Z*%rC;LqmHjuh>VT<Tt5u5aDbk-tzl
zmLFbbmhTkOYUgpyb3*r=$k71WHMV1w?a1%f*e;#zQrWH|Ys<mbZO>69D{?r?CQ8oj
zCP(rqFi9;B{7)t)VQ(~8v?C{A*GFL3a4E(IDgodw0zd$nNul7<@goes0tmM!1vNrO
zOn@fT-k!E<!qpc8zLFGLuDgUb2G`<I#Z<scy)D&WZe<+9RnpdrbYmYIb8scR1NL!n
zu`O-21G+y}OcO}F2C6|8q6r)%NDynmA-KsyC?g&L1rd)={0=64nBeIxqm?Ni&NK;h
znrao}Ga-C<O5$~_dIN}rZ4g+N@puReFj>UHp90CEVh8xF)(x(ugji;X5CE9N^Wf~g
zv-v^Iu}^pG1OExW+~2k4KeXyUwEUjtAJF{+x$$R!&_ho=nrBG&Kvmi`w83?+alutC
z_+<Z+^vWUa$R+*AC9Q8v?;F#&H+Ak!l{}kj%ZD^>Q0E3!Zt&%XW4B80uKKqy1ei7)
z`-I}*ISBkn_=E$S2Cs*goq*0wKv#c-*!ukizB|<Ut=hMjLY?OWYn8MWa8lz}lr3KI
zhg3b76!_GY20P$`fbuk<Od~7@P{v#jy4u$g_)27M)FmIv;P$n>bOP-|$B;zNn-<JB
z&^$$<8;F7*xv5#Mrkmb_&=bCa;mZV`%l`iUbocqF2$)NVi-Bn|F&9X}Ob7UcAO+$H
zIUp=RaQuoolD6RsL_di)sd+#vlZ20HvSCU{;uMSuht8lWiM=>O!&M?K;;W}Y<KQ$%
z6D9EjC=oxz#niNfHz5XN8H*>d2$N6XFMR_90yNsD+V^O5mri%7bl1AWb${@|nR{oz
z%{jVsM^_0^%o!4OozmQ=b@yqNJ^jq)%JEARpUmb@YVQ4ok2Ux4r=yx}NVfqZDJHNo
zvd%j1+m?d)cQ$=x$qWJ_CFCDi@1K~|%68;lzI?f(j0hzB?d9vX){$ZJ*TeM48SC#U
z3TUv2Twao4xQ<9-H|Q9gRKDNgJ9F)-Tt1iZ&V_41MIn)t$laxUiy^K6PqrAr<QOJ6
z)GePkcz?qxL?bepUJ-jXvZ=_tzhY0#CHPcKI3vbDfSs-MS0FGY6jelRYV|3i5q0OM
zh=$djpCWQ($*+iPS@J8f*D2}(RcJ0D!o2>KH2J93rI`|f*H&`WMeQk}O%kwDf&2tG
SCMvL8eyzMqayt?-2mb->om?3J

literal 0
HcmV?d00001

diff --git a/server/map/__pycache__/get_lake.cpython-311.pyc b/server/map/__pycache__/get_lake.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6e6e9b2c952ab484ff4170150fbfbb4af8e8a462
GIT binary patch
literal 4621
zcmZ`6T}&HCde*zP*S~~6+h8CNFknI)NSi=X0^vgbnugN9OG9XVj%RU9jP09ULZIst
zI^}_$A{{-o3ex2yKU6tR=>sp7@^BC7%cYOI*6OfUsubyzlk(;&R|!QY-F>shHV$Mw
z{&v2ZZ@&5FoB4j`uP&DzLHTF%e}%iX2>pv9rNZqrUQR;eKEeny2{g^X*OV~Lv(xN6
zH_b7{e)F^$`q_kK-a2iCKJG4>=CN6@VT)j&<*@ZGH|@YY^g1!~5enK~Q09|ClWv_#
zC6;2Tr0%@3kdWd#Sdh`{7D-$Z!gFFO8J1Gv*?2;Tns!8@9}E0h_`kftQ1N7x1!Ql*
z%tIJ|1pi`TC}kNd_0lgkVfG=c@Tk0sL0iUl(>?^`QMrRblC_M$=4ECXP;54!rJCn)
zlrhPsY)QG5Y0#?Xnqfhu@T;EBproS{4I+_SG~HpZp+$y4*AN3g8O@nX?6Ooldy33v
zcga~9#@1>bRb%vs;mG*O7eB(3W#`#cQWBEV5oswcEFXxZ(}{RABE?h5qx7s^c|?KS
z7Rb>=<dzU7Lc$mlkG`^nar}0OlIRRsHZMrCM}~vE?h=J020AgFN{WK+FwmPq1PerW
z(4k^m)VWkzNa`GcA>F)4uLFGO711q1GMd6dkkdI)NX+UsIv7qy<^`QI=r|maA|h3J
zJCs+)&z_q31f&#al2USEW<n5eNvZTqnxtaKP7Vyr6gMz4A4$)|1Sw265=t-WwX=d0
zy-9Zw#{v$+F!2FP9{%EU04pfZ@mriH$9dL7mGf(yU*Y^lUqg;-So>TF45{3(#tkdn
zaJgIMJR0XwIL|K@WOpm>?u{nZeQfi(>OS+NQ?;GfZ08GzG5de)%GcEYap@0BUw(1#
zi&d_`LZ2~UfS)PJe<-N*?bq7g-rldfz$3Hm?z>x@A?U~Vum9<K)~hxKwZ@>z^=Vw6
zQmSGd?DPju<M<H!!w@(Aj`fF82B7XLowQ-7!~;}S)=J%sVXaO$hWp5dq+K?+*G45u
zyHn=M7KlwUGHi;au=<u#tCCHaYbs4cb7dabe9Pcb8OyLTGlABa&pyj=F_baO9JW+O
zC(vi<j774QSs~eit(EcDR<^<@Um4BtU}t4Ztnhf<zG0P-8IUVu%h+W*wpBRD)}p-R
zuxx&VCG%2Eg)_{yS7ygv^LVqSYIzxS@ASRjO5pBGMK)u{UG$x^+S6nl*p+dDc3iRp
zWc^3RExWP1vX1QBk+Ma0y+KMhuBp)Pk)ltkFG+xq?5ZBeHMa~ONxqaC-%OQtRK_YT
z?5^gOsgWvTAh||rsL)pLnONnTIag^kQ-crSTG<(Auvuy?^@6N5b7k$%9VqozuNh--
z-2)S@f52uAz<t=?A0NOC-!)cC05`Kr%NZx&yR;Z%GOl}1*O+@@sk5{)LUYtCjRI@!
z(rT&q^}AOsYo=BTRw%Itd*#}PU_Fn@7F3J@ok;4d&~+iwnyIU@W1n32XY`jn_w5=)
zkWAh0>S!D(wicO1RE%E1rpe{Hv*6XGcrw-=BdLY77z%|5b*qG0pw7pH)Vv^(B|^o~
zogXdSpgvfHh$HHJ5xX>@TQAVL(6EaOH>j5m$Fc5+rcwmQlMzV}qq{r~qW*yD=_OSh
z%^omQxgrqlOp}C?{0%`S!rT$*)OO+p_=+D%MCNbc$msIHC>2`>?+87TNJS$FaWqsS
zI0ED%b$l!6@%ZD&-zQekOTZ{g_Li{p;(t(#RbEi`y7fG8TOa~#nC=9`hxCY}QI@pR
zSu8nB$?OoPpi7DiBJo4V$H2}Rk0nz?2*;CHxU<~2Zv@uO^K&UNvfO-*kQ8Z;C)=yU
z?+u=64|;TSn#7Zm&c@PMx6xQ@_;cMx&mc@?(OFR<IuCId*er^=1rh)V)5Wu~h$$io
zSZ5PLl2G&2EjJcsXCai^;k=Tf6iG%Q#z#}h+rpjj&4oll=Mr&I(s|(yFpQ^&ZZS>_
z!o86xm=mc87IiBmfD|Xxt?IUOccMa?W*otKLQl;IH^w~)(@4K77#XNzUWkaG*m=lW
zAi*%MPuN&?2QluF&cnz={Pv{@LNB>)Hk7GbXLnA;QQTW`qAR*U2(T-jy>3eii(&C*
z1k_^{62*nw^8$%Mo{-+jZ++Ex7}U5BHO{-P96PDF#zu8U5b5=#`O0g>xP`i_>hQ>i
zKucq;_zm3J6;v=yFy?`uc$eZnr1G5_->LAOkYF@;zdo&WoP2ym?YQ*gWwqm~TK}n5
z|LH29_qAtz8+}^Gajj$Q$;n*Ddz$acs;gi@E$s#LmhH&T6s%s(H@B=_es-{LV^lkM
zs(^UcG*g6Yj-UK(>tAe~(hi<b{X?36XpPVNyR%E0e{hWlK0R-5b&uq_N7U|7t$Vb9
z*jhT3!mlRR&aDqU^R}(anzw(G&v}Ox?{MDNx#jE2`T904ejj_h_#~r^eWHy1T0L=9
zJ8@Mx{Hf}@runWZzH8eBE9LTw6Zt!`{>{*4NNFBdo5!{0aS*koZS_jt*Sva(Zl$My
z4!VvQV9olhzIlD{>r>g*TzyEX59J$Lw;H-~4PDu*8y|c-tu_p34Fif%L2;g@^`@^r
z|N8T6B-hxZH1<3X3~mP1z=#$Y*$SM^1<pPhR|Ds@!1=XFp!OZghQ9Aty<?hp4AQN}
z2__#L(t>B#-H>Q?^gV0o-f$IIQ%^IXfzVI=1DkEy;6-ileYJl|>z`5%e^4;9fo4z-
z0EcbBQ$PTehW>&N1&(cX4d=Rsm6IPRzrjk^u-YYPUBcQG*kGW4WBl89*RJF{dN*pn
zZO%H^E^Yrj23!7Rf<bME9!zhzwt7$JdQYppV_NT++ImK7J)`i=AhOM`_>XN~P)@(E
z@>3c=rIhOSb4LSB83!uBxL2W#LB$<{s`5uQ{;0BFg=}-1CaMGQ6=|;g`wPAcE$C^B
z{UXag?XJ5p%03-6L%Yf~QyZtA{^h>`+^<R$=gKY|GdroEX`fV(fuzMGGg<1v_mt{K
z79=)fMiRzmI7kB7N)RxT!d>YkB!s&jPuMJ*s{=PYz|4>cS{|4();B%y<kucrkcyp=
zO%QIZla+u+4uZ_OyA;Ku;ARA}>@4AN4364l5Rs38GH50P@F9Z~yiLJT3XTH^@`PqO
z<W~T6%Ag!K$wk`5f(NfKBIlt`q;a?8af+^{mOg^wI2?h;SWy^q6^O!=dL6(DdR|j^
zFQc>`e{54*$DdqQTi;h}rnH)=m5cfM#+CQ-p0+JdPtMb$dV-oKSU`4j5qxQ1HLr^K
zM&Eji*4Vvj&o{Lwtw*$`(5m}6?^;c+&#8P+<AVwxgxl>8<lFkeFuZm!46l9F0oG${
z+~WN?-oJiX<$E>0S21ed;Z-;<34m}xml3<tAbdnL6r~?99CczTRFzj4;fjpH6x~~l
zpCNeRg(4V-sdJ}~KqLAXZ$9!C(2;j2pq|&z2RTGrbj?zpFrUH;31O66hdz37#cu$B
zJupljd6iPlqhrc`HIE!C#xIXfDAl#V+nK|l#u8{b$1nrg`U0ZgjfvuSANZlEhVd29
IUSNp*e<es&ZvX%Q

literal 0
HcmV?d00001

diff --git a/server/map/__pycache__/get_measurements.cpython-311.pyc b/server/map/__pycache__/get_measurements.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..553c75246fe5f79b0d08aed421fd6b9a539d05da
GIT binary patch
literal 6039
zcmb6dTWlN0agRKb$CpS+)RU5UloiRcEm=~W_z_F-LyGNKl9R|yU`2^ab5Am5KIFTj
zWUI&62=c)i1|p*Ys<sNs0BXu2LE*191zMn=LE8X#!T}-<5MZFlhyDlz0gRw%JA231
z$##Jr?`CIaXJ=+-XJ7oS*Xu&?$Zh}PRyq*+Cn3s@rG$KU6_B4Jj4%~Pv!t}dsRT7k
zQ3lU4Ytiehvow^}xGiCywd-|s+>u~r8Nh8Hp;;%kbFKvoc6>z7x-m6_`k6gqV!EHw
z7#tBfF_z!}Z&>AHA~(Mzq>}SuYJMRWCtPoo=RjtDkx!+Ug{ZaU94S2T5&FB|JBXuB
za5Q6;s2s4KekAMX6hdFXXFL=F$qH5GlJax-zNj!VbPQ!|5<QOI`&-5?*|DXqTIT@T
zTBU)aO_a?<*-ezgL@_4HS*1i<#T)|tH9E}w!LCgv!$=Ovwu1N(iIHqljRowkYFwfC
z@bhbPwGpnEmKd`&3O&ezEj#v9^#bKJQFSKDXQKR7N_1AtkRH}oYs@xr!=_bY7Af4g
zX(#@lhGq}`zKAkT9LTstcU24aV%z7C#$S|E8NulNA>#&Fk4esgo1T)3WW1ucs%r^y
z%XqM5zt&9Mb0wS2dZoHGP?GUUF6@))Bp+_Ut+>rJw!VG6CFALrJkJ>qzgZfx88okf
z-x=3~>ylfnFMExU*jUAv^l49MbC#R6;db!Ohl997kGY3LJX0?@pW{otxi#d*AH|lc
zP05xcGxe$Ge|aD|^>{Ui!D_Rmx-yPC%@#9_68vyiHFK7H71_{DJqxOJnSj&)`S9yZ
zeQlJaddya3_UU&(ZOsgV)^{_0Q)@pCKeu(0<j39B+Dx;lrNe9sA0Rz9Ni>@!^vJYG
zft<hG$J){yRq(Wb4*H*I&9q5vxTorc)MUhkZ<RpTQYBxcR{X+(h0HCO-)4TV!GCb*
z!Mz7L@rA0@rAmf@SAi<OikZDK_b1aX9<EB_K73GW{~T7KFDfe$g~WlXghZR$V~f!f
zq0uDO@N6l*K|EU3h5OA?uw@;}1e49C-nEwHr3PI0<ze&82I<2GkT_D7L8u4uU6~G(
zl_OHeuh6HKXW6<2n`x%wgN`I)l&jP#x^2CNRw)VP+N)Zt7#`TK2PA9eNsZ(nK5F(B
z)(Mn@4gcxKJVHcnsk1DV>ndZlr4-DCYUwhLhs^$&DRMx7Bf#)<<j0VgF1F^GnV6h7
zf0a!PPH_<-&2tGZDP9^s0*o0hDWrG<zY3>?k^m-k=Ru0&CnI7FkxCvhX)YNNN{X)D
zS>}$g!r<Ac7+V4CS+ds$ry~jOnQHLhO#1qGY$YbdQc2<z9=t$KCk8qdOU4rE#MK+I
z=*=W22+vlJ2hXl>d}NWUk;B!##KbfP$10pua<RmEmam{J8cp+&=p9q-Yj0hdVryc_
zCQQ|rrl%*~Vqd><X___S%L?q3>6&P>qY$HV=%3g0TngX0vhWTZ(RlW{(OxGApF|Lp
z7@;&Nn4Ea+YRPmplq@#d*SLIX9NJ@ZLYNf2b8+IWiIN2n?VDOk36Z{v2v64jJ%#{s
z%Ge{@-yS(B?2#>LkL>e%WLxQHGzJD0$5t+lYmU-rY91KN%J^uWvnz{L$_;r3NvDsB
z<{?CRj5MY^8k+5VDxTsskD*%67hz9#iL7mVGy&ucx<?Lqdv*Z*R&b=0?V8KLNV;lv
zBU$_Hn%5K`%~LxeG-qWxRM1pXvm1)|W@3gc^?PJS?~zS%5k7c2&Xf?gj1*rjaeB_I
zIYmwoSvadluCDb691p2`G#<Ih&GTFwVvtG-M-wFQ^ANy6a)8&EMJ|=#ME;KEcq0|R
zvzSWqESTX*uz6zl2Wmib3F+&k$$Sh$12TmOR+p$wf1uZzo@r3C=@!@erc=6O>?%yS
z8*GB(M1d7=aBMV{#4+L<yO2sJ@tWn`_ux8Vk0miI7;EhlJfGs(MNXWrF((}CKgI5S
z4eqUZ&Zl5qNs0p^M1Rd6SzeCEqIzGCk`Auf(&ECviGEgdaJRW=TI4ilffJ)QBJsFZ
z7mdWDX^0GGjK3XPb4jex*HfvuX5%BtMHo;ha4MlW(#hCDice@(LF9Ro7n(JhTGi-y
zOb|8uN+h1<1kJj*j5Q}*g|L1x(wro|^Taoefm;)FW>wHE5mBS{@Y~|42o^L4+@lE1
zlT6dBaW1LZmibsx)NDANSQa$r#O)}zOroTD1#l4tR9J@L<TSTV-T;FfuenK;feV^#
zm0ZoBAlEg`&LyKM%=LF_w7|s|G&?LruvTl{Rk9?MR{_l_+=|cNN^_79wC=0FChYSh
zvotI4d9u%Io>E@u6Gp2i$!8iR(5BB-k}XsWJU7zFm|q&Tq<IrDxI>m#V9gs@S*#)5
zMr%X6yT%WV)*T_aYq;bs=eI#J&=A)BOXK_z-OVJoYTQeSBQ}y*w>8HCTvuW9(i%){
z^<mK5i3op_;|*;TCy*VsW&?a0{bBU{sktA)1Q6zuVlq89&Ivcg)biXipIRI_IW#n9
z%#FE3WO;6XE)Oo>(R_pnDLHQ#7WkWBk*t5hXOI&2P|@<H#TG1DJWZagv)F;0zK`qf
z*R9`Cn6S!(WhVUC*Yxq-`*#%|tNPe1U9_W)E}3bCGSFH?-OdIbWX~5m*<AhR+u7+N
zg8-#AUn>N=Hg0XcTwo96&TTo$MW{PBvbkEOIzl<i=7rt1zPvSm?n{T#Hl(%<73-ZH
zbwvaqJ6UW+{w_H@Ec?j&xG^O69sjCM?Yr<bfOq6Z@)iO;x%bt;u}1*LcLJBU1DC&^
zR07x3z%@B=?McyvdXE))`tsD)YlU!6?#->{Qt`ON?CsBow-$E0dvij5cx&aeBU?v`
z4m#9aL<B(dk=%5#$$bp^3PAR=xh};&B>RVoU8pl8Gi`-HbJqE7ee?RQjqZmXpLCFz
zjOZYHw$Ra)o%*(^ePejz`op<T=8DMg4e4N=+GTsU>hiZ9bt~*kD*Mv9cOT;`ufk5O
zdm-52Lxm7q=ou#OfnMkhL~ytBaDGbZJf(J?f{=y4I{@qMVl%4m+VS^n`+IWlD*j>B
zKP>x)3;wo^Hy_rmJJ+3{uKU0)d*C-0w<bTE-kMg}7ghGfLT4Dp@G%=MbPp7|x*-5T
z&`5v_g$JSgK^PMN*~`Ut<n7w=^lW>G0nf1N8J6|;dj|Ofa-e4?aCkd#SP2ZMfdR!g
zsQL!w;d6@bT$cXcg90s|jLP8)O5;Vf@nV)Kw6eL@{2?{`k{W*XuP3*|lWOZ!mUOv;
zEuuQ-3}wLV<y}atV18Z=k1CyGYUfyXx)3^)A5%l8ipb+7as;lscOmb7l0T(}URHv~
z)!^}U=5eq$cSj8luQR)leq%~#L=BBRno&Y0{_^5ixBoQy=TRkeaoqzp`;P7Op5E>~
zt@Mtmy<<gWt0#I1d^o*6v2pxyOV@^^wj6oHY`2_{TTVP4dws*JwjRtkJ#xwYr<B%L
z3gd5;IP%aLx&MsPdiF`tt?Tg-|GsBYSNG;@-n(;fbo=0_a`094;HygK8MX6_%(Q>Y
zw9CPfM;GMLD+=?5%Df>nZ$MN%zO4AulMhaAQ1{2OW1r6C0>8Mnc`YCQ<@+1&?|SOx
zhSQ2?RP~I?p3y>kSN3xD^7nQW3~e6DU40am+m9*j$JF*?*~?F!eCrMr5t;E7u*-Pm
zy21P~0C*MVsLC9bnWKf~_N=@5dje0<1`SE+LN@0P-MmdVuL}wtROz5h2MaW_LkG6$
z!20|03&$1ugi4=~=@S*ULN}{)vrIQbm~GCFUM~s1yVKOS-PD(NC{06Z(~v?BtMss3
zeg$&%`f&1<$uabgG1pYD^&2KI6}Em8wgIj=D=U1o_K!TWg_7^P4*|gcmvE7aT5em&
z$_Hl;_yg~zF3sDL778UT*g6CMZ0?hx_sQ5mhTbP*-zVdkflCv%-z0l2;w_kgHcq(e
zxTf_}C{OO*apD+4|GsNQJh_G$^pZ}KKbj2sZWQWu^gnM?AC%CaHTD7bcc`D@;~*)J
zPO$tEK>o!t$diiu0PdlJgUOzh9c}P#>u`fIN`nWm;A_jS6`Zc@HQ5=2cUy-WVWTv7
z0RKO+hV?h^cV;{FMvsYs@4y?>@8Qoup`UK}<ACYsmt{BrMFf77&@IMBHV7NpU<BiN
zvbB;Erg3s<tf?hCnqEWB%=&(&*%siqBN|~y5^jnB5}F^F(}@&L$GI_H1d_x|cnF|q
zr6{U^-1qcf0d>mew}8C&^j`tB$mO?yUY5;o0S(CJx9FO+P_Xvbz%|-MwQq1m1jTcK
H?(%;DFj3PQ

literal 0
HcmV?d00001

diff --git a/server/map/add_new_lake.py b/server/map/add_new_lake.py
new file mode 100644
index 00000000..cae00de1
--- /dev/null
+++ b/server/map/add_new_lake.py
@@ -0,0 +1,144 @@
+import geopandas as gpd
+from shapely.geometry import Polygon, LineString, MultiLineString
+from shapely.ops import linemerge, unary_union, polygonize
+import json
+import os
+
+
+# Read a lake relation from a GeoJSON file, divide it into grid tiles, and write the result to file and to the response object
+def cut_map(self, body_of_water: str):  # NB: implement body_of_water
+    # Read relation from GeoJson file and extract all polygons
+    geo_data = gpd.read_file("server/lake_relations/mjosa.geojson")
+    polygon_data = geo_data[geo_data['geometry'].geom_type == 'Polygon']
+    polygons = [Polygon(polygon.exterior) for polygon in polygon_data['geometry']]
+
+    if len(polygons) < 1:
+        raise Exception("Failed to convert GeoJSON object to Shapely polygons")
+
+    divided_map = []
+    cell_width = 0
+    cell_height = 0
+
+    for polygon in polygons:
+        cell_width = 0.04
+        cell_height = 0.02  # NB could be calculated based on cell_width and distance from equator
+
+        lines = create_grid(polygon, cell_width, cell_height)
+        lines.append(polygon.boundary)
+        lines = unary_union(lines)
+        lines = linemerge(lines)
+        lines = list(polygonize(lines))
+
+        divided_map.extend(combine_grid_with_poly(polygon, lines))
+
+    '''
+    ####################### PLOTTING ############################
+    tiles = [gpd.GeoDataFrame(geometry=[tile]) for tile in divided_map]
+
+    print("Plotting... This may take some time...")
+    # NB test plot
+    fig, ax = plt.subplots()
+    ax.set_aspect(1.5)
+
+    # Plot each tile
+    for tile in tiles:  # NB: plots every tile, which may be slow for large grids
+        random_color = "#{:06x}".format(random.randint(0, 0xFFFFFF))
+        gpd.GeoSeries(tile.geometry).plot(ax=ax, facecolor=random_color, edgecolor='none')
+
+
+    plt.show()
+    ##################### PLOTTING END ###########################
+    '''
+
+    features = []
+
+    sub_div_id = 0
+    for tile in divided_map:
+
+        # Calculate the tile center from its bounds, rounded to four decimals
+        min_x, min_y, max_x, max_y = tile.bounds
+        center = round((min_x + max_x) / 2, 4), round((min_y + max_y) / 2, 4)
+        # center = round(tile.centroid.coords[0][0], 4), round(tile.centroid.coords[0][1], 4)
+
+        # Skip any non-polygonal geometry so rounded_tile below is always defined
+        if not isinstance(tile, Polygon):
+            continue
+
+        rounded_coordinates = []
+        for coords in tile.exterior.coords:
+            rounded_coords = (round(coords[0], 4), round(coords[1], 4))
+            rounded_coordinates.append(rounded_coords)
+        rounded_tile = Polygon(rounded_coordinates)
+
+        tile_feature = {
+            'type': 'Feature',
+            'properties': {
+                'sub_div_id': str(sub_div_id),
+                'sub_div_center': center,
+            },
+            'geometry': rounded_tile.__geo_interface__
+        }
+        features.append(tile_feature)
+        sub_div_id += 1
+
+    feature_collection = {
+        'type': 'FeatureCollection',
+        'tile_count': sub_div_id,  # After the loop, sub_div_id equals the number of tiles
+        'tile_width': cell_width,
+        'tile_height': cell_height,
+        'features': features,
+    }
+
+    write_json_to_file("server/lake_relations", "mjosa", feature_collection)
+    self.send_response(200)
+    self.send_header("Content-type", "application/json")
+    self.end_headers()
+
+    self.wfile.write(json.dumps(feature_collection).encode('utf-8'))
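+
+# Each entry in feature_collection['features'] written above has the shape sketched below
+# (the id and coordinate values here are illustrative only):
+#   {
+#       'type': 'Feature',
+#       'properties': {'sub_div_id': '0', 'sub_div_center': (10.61, 60.79)},
+#       'geometry': {'type': 'Polygon', 'coordinates': [...]}
+#   }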
+
+
+def create_grid(poly: Polygon, cell_width, cell_height):
+    # Retrieve bounds of the entire polygon
+    bounds = poly.bounds
+
+    min_x, min_y, max_x, max_y = bounds
+    grid_lines = []
+
+    # Horizontal lines
+    y = min_y
+    while y <= max_y:
+        line = LineString([(min_x, y), (max_x, y)])
+        grid_lines.append(line)
+        y += cell_height
+
+    # Vertical lines
+    x = min_x
+    while x <= max_x:
+        line = LineString([(x, min_y), (x, max_y)])
+        grid_lines.append(line)
+        x += cell_width
+
+    return grid_lines
+
+
+def combine_grid_with_poly(polygon, grid):
+    intersecting_tiles = []
+
+    for line in grid:
+        if line.intersects(polygon):
+            intersection = line.intersection(polygon)
+            # Check if intersection is a MultiLineString
+            if isinstance(intersection, MultiLineString):
+                # Extend the intersecting tiles with the polygonized results
+                intersecting_tiles.extend(list(polygonize(intersection)))
+            else:
+                intersecting_tiles.append(intersection)
+
+    return intersecting_tiles
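+
+# A minimal, self-contained sketch of the tiling pipeline used in cut_map() above,
+# run on a unit square instead of a real lake relation (the cell sizes are illustrative):
+#
+#   square = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
+#   lines = create_grid(square, cell_width=0.5, cell_height=0.5)
+#   lines.append(square.boundary)
+#   cells = list(polygonize(linemerge(unary_union(lines))))
+#   tiles = combine_grid_with_poly(square, cells)  # grid cells clipped to the square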
+
+
+def write_json_to_file(path: str, file_name: str, json_data: dict):
+    # NB add lake name to 'added_lakes.txt'
+    print("Writing to file...")
+    if not os.path.exists(path):
+        raise Exception("Directory from path does not exist")
+
+    with open(os.path.join(path, file_name + '_div.json'), 'w') as f:
+        json.dump(json_data, f)
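+
+# Example (mirroring the call in cut_map above): this writes the feature collection
+# to server/lake_relations/mjosa_div.json:
+#   write_json_to_file("server/lake_relations", "mjosa", feature_collection)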
diff --git a/server/map/get_lake.py b/server/map/get_lake.py
new file mode 100644
index 00000000..a5c07d19
--- /dev/null
+++ b/server/map/get_lake.py
@@ -0,0 +1,110 @@
+import geopandas as gpd
+import pandas as pd
+from shapely.geometry import Polygon, MultiPolygon
+import json
+from server.map.add_new_lake import write_json_to_file
+
+
+# Writes contents of a map json file to the response
+def fetch_divided_map(self, file_name):
+    self.send_response(200)
+    self.send_header("Content-type", "application/json")
+    self.end_headers()
+
+    # Extract contents from JSON file
+    with open("server/lake_relations/" + file_name + "_div.json", "r") as file:
+        data = file.read()
+
+    # Write contents of the JSON file to response
+    self.wfile.write(data.encode('utf-8'))
+
+
+# create_groups merges related subdivisions into group polygons and writes the updated relation to file
+def create_groups(relation_file: str, data: list):
+    try:
+        print("Creating groups...")
+
+        # Read lake relation from json file
+        geo_data = gpd.read_file("server/lake_relations/" + relation_file + "_div.json")
+        relation_data = geo_data[geo_data['geometry'].geom_type == 'Polygon']
+
+        # Loop through each measurement and create groupings of subdivisions
+        for measurement in data:
+            subdiv_list = []
+
+            for subdivision in measurement['Subdivisions']:
+                subDivID = str(subdivision['SubdivID'])  # Convert to string to match format in feature
+                group_id = subdivision['GroupID']  # Extract group ID
+
+                # Find the matching subdivision in relation_data
+                for index, feature in relation_data.iterrows():
+                    # Collect the group ID together with the subdivision geometry
+                    if feature['sub_div_id'] == subDivID:
+                        subdiv_list.append((group_id, feature['geometry']))
+
+            # Sort subdiv_list based on group_ids
+            sorted_list = sorted(subdiv_list, key=lambda x: x[0])
+
+            current_group = -1  # Current group_id
+            new_shape = []      # List of subdivision geometries for current group
+
+            # Merge subdivisions in a given group
+            for element in sorted_list:
+                # If the subdivision still belongs to the current group
+                if element[0] == current_group:
+                    new_shape.append(element[1])
+
+                # New group id is found
+                elif len(new_shape) > 1:
+                    # Merger all subdivisions for previous group into a single shape
+                    merged_polygon = MultiPolygon(new_shape).buffer(0)
+
+                    # Convert to Polygon
+                    if isinstance(merged_polygon, MultiPolygon):
+                        merged_polygon = merged_polygon.convex_hull
+
+                    # Structure the new polygon
+                    merged_polygon_structure = {
+                        "type": "Feature",
+                        "properties": {
+                            "group_id": current_group,
+                        },
+                        "geometry": {
+                            "type": "Polygon",
+                            "coordinates": [list(merged_polygon.exterior.coords)]
+                        }
+                    }
+
+                    # Append new polygon to relation data
+                    relation_data = relation_data.append(merged_polygon_structure, ignore_index=True)
+
+                    # Update current group to new group_id and reset new_shape for next group
+                    current_group = element[0]
+                    new_shape = [element[1]]
+
+        # Convert GeoDataFrame to JSON
+        relation_data_json = json.loads(relation_data.to_json())
+
+        # Write relation with group shapes to file
+        write_json_to_file("server/lake_relations", "mjosa", relation_data_json)
+
+    except Exception as e:
+        print(f"Error in create_groups(): {e}")
+
+
+# Returns a list of subdivision dicts read from <file_name>_div.json
+def get_id_and_center(file_name):  # NB buggy
+    # Returned format: [{'sub_div_id': id, 'sub_div_center': [x, y]}, ...]
+    geo_data = gpd.read_file("server/lake_relations/" + file_name + "_div.json")
+    subdivisions = []
+    for index, row in geo_data.iterrows():
+        sub_div_id = row['sub_div_id']
+        sub_div_center = row['sub_div_center']
+
+        print("sub_div_id: ", sub_div_id)
+
+        subdivision = {
+            'sub_div_id': sub_div_id,
+            'sub_div_center': sub_div_center
+        }
+        subdivisions.append(subdivision)
+    return subdivisions
diff --git a/server/map/get_measurements.py b/server/map/get_measurements.py
new file mode 100644
index 00000000..9b429324
--- /dev/null
+++ b/server/map/get_measurements.py
@@ -0,0 +1,178 @@
+import json
+from datetime import datetime
+import random
+import geopandas as gpd
+from server.map.add_new_lake import write_json_to_file
+from server.map.get_lake import create_groups
+
+
+# get_all_markers fetches all measurement, sensor and subdivision data for a body of water,
+# converts the data to JSON, and writes it to the response object
+def get_all_markers(self, cursor, waterBodyName):  # NB: waterBodyName is currently unused, the query below is hardcoded to 'Mjosa'
+    try:
+        sql_query = '''
+            SELECT m.MeasurementID, m.SensorID, m.TimeMeasured, m.CenterLat, m.CenterLon,
+                   s.SensorType, s.Active, 
+                   b.Name,
+                   d.SubDivisionID, d.GroupID, d.MinimumThickness, 
+                   d.AverageThickness, d.CenterLatitude, d.CenterLongitude, 
+                   d.Accuracy
+            FROM Measurement m
+            INNER JOIN Sensor s ON m.SensorID = s.SensorID
+            INNER JOIN BodyOfWater b ON m.WaterBodyName = b.Name
+            LEFT JOIN SubDivision d ON m.MeasurementID = d.MeasurementID
+            WHERE b.Name = 'Mjosa'
+        '''
+
+        cursor.execute(sql_query)
+
+        rows = cursor.fetchall()
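+
+        # Each row follows the SELECT order above:
+        #   0 MeasurementID, 1 SensorID, 2 TimeMeasured, 3 CenterLat, 4 CenterLon,
+        #   5 SensorType, 6 Active, 7 Name (BodyOfWater), 8 SubDivisionID, 9 GroupID,
+        #   10 MinimumThickness, 11 AverageThickness, 12 CenterLatitude, 13 CenterLongitude, 14 Accuracy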
+
+        # Container for all fetched measurement objects
+        measurement_data = {}
+
+        # Iterate over all fetched rows
+        for row in rows:
+            measurement_id = row[0]
+
+            # Create a new subdivision object
+            sub_division = {
+                'SubdivID': row[8],
+                'GroupID': row[9],
+                'MinThickness': row[10],
+                'AvgThickness': row[11],
+                'CenLatitude': row[12],
+                'CenLongitude': row[13],
+                'Accuracy': row[14],
+                'Color': calculateColor(row[11])  # NB color calculated based on average thickness, should be minimum
+            }
+
+            # Check if measurement ID already exists in measurement_data
+            if measurement_id in measurement_data:
+                # Create new subdivision within measurement if it does not already exist
+                if sub_division not in measurement_data[measurement_id]['Subdivisions']:
+                    measurement_data[measurement_id]['Subdivisions'].append(sub_division)
+
+            else:
+                # Create a new entry for measurement_id if it does not already exist in the list
+                measurement_data[measurement_id] = {
+                    'MeasurementID': measurement_id,
+                    'TimeMeasured': row[2],
+                    'CenterLat': row[3],
+                    'CenterLon': row[4],
+                    'Sensor': {  # Each measurement only has one related sensor
+                        'SensorID': row[1],
+                        'SensorType': row[5],
+                        'Active': bool(row[6])
+                    },
+                    'Subdivisions': [sub_division],  # Array of sub_division objects
+                }
+
+        ##################################### TEST DATA ###########################################
+        # Temporary test data
+        test_measurements = []
+        subdiv_id = 17
+
+        for i in range(3, 10):
+            sub_divisions = []
+
+            for j in range(0, 30):
+                min_thickness = random.uniform(0, 10)
+                avg_thickness = random.uniform(0, 15) + min_thickness
+
+                subdivision = {
+                    'SubdivID': subdiv_id,
+                    'GroupID': 1,
+                    'MinThickness': min_thickness,
+                    'AvgThickness': avg_thickness,
+                    'CenLatitude': 7.0,
+                    'CenLongitude': 8.0,
+                    'Accuracy': 1.0,
+                    'Color': calculateColor(avg_thickness)
+                }
+
+                sub_divisions.append(subdivision)
+                subdiv_id += 1
+
+            measurement = {
+                'MeasurementID': i,
+                'TimeMeasured': str(datetime.now()),
+                'CenterLat': 10.0,
+                'CenterLon': 8.0,
+                'Sensor': {
+                    'SensorID': 1,
+                    'SensorType': "test data",
+                    'Active': True
+                },
+                'Subdivisions': sub_divisions
+            }
+
+            test_measurements.append(measurement)
+        ################################### TEST DATA END #########################################
+
+        # Convert dictionary values to list of measurements
+        data = list(measurement_data.values()) + test_measurements
+
+        # NB temporary placement
+        #create_groups("mjosa", data)
+
+        # Read lake relation from json file
+        geo_data = gpd.read_file("server/lake_relations/mjosa_div.json")
+        relation_data = geo_data[geo_data['geometry'].geom_type == 'Polygon']
+
+        # Add group IDs to lake relation
+        for measurement in data:
+            measurement_id = str(measurement['MeasurementID'])  # Extract measurement ID
+            for subdivision in measurement['Subdivisions']:
+                subDivID = str(subdivision['SubdivID'])  # Convert to string to match format in feature
+
+                group_id = subdivision['GroupID']  # Extract group ID
+                new_group_id = str(measurement_id) + "-" + str(group_id)  # Create concatenated group ID
+
+                # Find the matching subdivision in relation_data
+                for index, feature in relation_data.iterrows():
+                    # Add the new group ID to the correct subdivision
+                    if feature['sub_div_id'] == subDivID:
+                        # Update group_id and measurement_id within the properties
+                        relation_data.at[index, 'group_id'] = new_group_id
+                        relation_data.at[index, 'measurement_id'] = measurement_id
+                        # relation_data.at[index, 'sub_div_center'] = feature['sub_div_center']
+
+        # Convert GeoDataFrame to JSON and update json file
+        relation_data_json = json.loads(relation_data.to_json())
+
+        write_json_to_file("server/lake_relations", "mjosa", relation_data_json)
+
+        ####################################################################################
+
+        if len(rows) == 0 or len(data) == 0:  # Return an empty list if no data is found
+            print("No data matching the conditions was found")
+            marker_data = '[]'
+        else:
+            # Convert list of dictionaries to JSON
+            marker_data = json.dumps(data, indent=4)
+
+    except Exception as e:
+        print(f"Error get_measurements(): {e}")
+        marker_data = '[]'
+
+    # Set headers (NB: a 200 with an empty list is sent even if an error occurred)
+    self.send_response(200)
+    self.send_header("Content-type", "application/json")
+    self.end_headers()
+
+    # Write the measurement (marker) data to the response object
+    self.wfile.write(marker_data.encode('utf-8'))
+
+
+def calculateColor(thickness: float):  # NB not final colors nor ranges
+    if 0 < thickness <= 4:
+        return 0xFFff0000  # Red
+    elif 4 < thickness <= 6:
+        return 0xffff6a00  # Orange
+    elif 6 < thickness <= 8:
+        return 0xFFb1ff00  # Green
+    elif thickness > 8:
+        return 0xFF00d6ff  # Blue
+    else:
+        return 0xFF939393  # Gray
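+
+# Example outputs (derived from the ranges above; colors and ranges are placeholders per the NB note):
+#   calculateColor(2.5)  -> 0xFFff0000 (red)
+#   calculateColor(7.0)  -> 0xFFb1ff00 (green)
+#   calculateColor(0.0)  -> 0xFF939393 (gray fallback, also used for negative values)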
-- 
GitLab