From 616f4a3cba5bfdb59ffad2a0b9fd5dae74a1e7fb Mon Sep 17 00:00:00 2001
From: Sara <sarasdj@stud.ntnu.no>
Date: Thu, 4 Apr 2024 15:56:47 +0200
Subject: [PATCH] add: /get_lake_names endpoint

---
 app/lib/consts.dart                          |  2 +
 app/lib/utils/custom_search_delegate.dart    | 17 +---
 server/main.py                               | 11 +++
 .../__pycache__/process_lake.cpython-311.pyc | Bin 4689 -> 1994 bytes
 .../lake_relations/added_lakes.txt           |  1 -
 .../lake_relations/all_lake_names.json       | 10 +++
 .../lake_relations/all_system_relations.txt  |  2 -
 server/map_handler/process_lake.py           | 77 +-----------------
 8 files changed, 28 insertions(+), 92 deletions(-)
 delete mode 100644 server/map_handler/lake_relations/added_lakes.txt
 create mode 100644 server/map_handler/lake_relations/all_lake_names.json
 delete mode 100644 server/map_handler/lake_relations/all_system_relations.txt

diff --git a/app/lib/consts.dart b/app/lib/consts.dart
index 97d7ba83..13b7ca3b 100644
--- a/app/lib/consts.dart
+++ b/app/lib/consts.dart
@@ -17,6 +17,8 @@ List<Measurement> selectedMarkerList = [];
 LatLng mapCenter = LatLng(60.8000, 10.8471);
 DateTime ?lastUpdate; // Last time data was fetched from server
 
+List<String> lakeSearchOptions = []; // Init empty
+
 // Font settings
 const textColor = Colors.white;
 final appTitleStyle = GoogleFonts.chakraPetch(
diff --git a/app/lib/utils/custom_search_delegate.dart b/app/lib/utils/custom_search_delegate.dart
index 19366184..13c62cf9 100644
--- a/app/lib/utils/custom_search_delegate.dart
+++ b/app/lib/utils/custom_search_delegate.dart
@@ -1,6 +1,8 @@
 import 'package:fuzzy/fuzzy.dart';
 import 'package:flutter/material.dart';
 
+import '../consts.dart';
+
 typedef SearchResultCallback = void Function(String result);
 
 class CustomSearchDelegate extends SearchDelegate {
@@ -8,17 +10,6 @@ class CustomSearchDelegate extends SearchDelegate {
 
   CustomSearchDelegate(this.onResultSelected);
 
-  List<String> searchItems = [ // NB temp values
-    "Mjøsa",
-    "Bogstadsvannet",
-    "Einavatnet",
-    "Femsjøen",
-    "Femunden",
-    "Fjellsjøen",
-    "Gjende",
-    "Gjersjøen"
-  ];
-
   @override
   List<Widget> buildActions(BuildContext context) {
     return [
@@ -45,7 +36,7 @@ class CustomSearchDelegate extends SearchDelegate {
   Widget buildResults(BuildContext context) {
     List<String> searchResults = [];
     final options = FuzzyOptions(threshold: 0.4, findAllMatches: true);
-    final matcher = Fuzzy(searchItems, options: options);
+    final matcher = Fuzzy(lakeSearchOptions, options: options);
     final results = matcher.search(query);
 
     searchResults = results.map((result) => result.item as String).toList();
@@ -70,7 +61,7 @@ class CustomSearchDelegate extends SearchDelegate {
   Widget buildSuggestions(BuildContext context) {
     List<String> searchResults = [];
     final options = FuzzyOptions(threshold: 0.4, findAllMatches: true);
-    final matcher = Fuzzy(searchItems, options: options);
+    final matcher = Fuzzy(lakeSearchOptions, options: options);
     final results = matcher.search(query);
 
     searchResults = results.map((result) => result.item as String).toList();
diff --git a/server/main.py b/server/main.py
index f730b9e9..820e58ac 100644
--- a/server/main.py
+++ b/server/main.py
@@ -7,6 +7,7 @@ from map_handler.process_lake import fetch_divided_map
 from map_handler.input_new_data import input_new_Lidar_data
 from urllib.parse import urlparse, parse_qs
 import ssl
+import json
 import sqlite3
 
 app = Flask(__name__)
@@ -38,7 +39,17 @@ class IceHTTP(BaseHTTPRequestHandler):
             self.end_headers()
 
             self.wfile.write(b"Root path hit!")
 
+        elif self.path == '/get_lake_names':
+            with open('server/map_handler/lake_relations/all_lake_names.json', 'r', encoding='utf-8') as file:
+                lake_names = json.load(file)
+                json_data = json.dumps(lake_names, ensure_ascii=False)
+
+            self.send_response(200)
+            self.send_header('Content-type', 'application/json; charset=utf-8')
+            self.end_headers()
+
+            self.wfile.write(json_data.encode('utf-8'))
         elif self.path.startswith('/update_map'): # NB: should be POST?
             parsed_path = urlparse(self.path)
             query_params = parse_qs(parsed_path.query)
diff --git a/server/map_handler/__pycache__/process_lake.cpython-311.pyc b/server/map_handler/__pycache__/process_lake.cpython-311.pyc
index 5a0aa0d8078879c49ca64ebba1a48b02f6626337..8789c1778de26f2fba48817f2cc609251a53097c 100644
GIT binary patch
delta 128
delta 2666
diff --git a/server/map_handler/lake_relations/added_lakes.txt b/server/map_handler/lake_relations/added_lakes.txt
deleted file mode 100644
index b10ac55a..00000000
--- a/server/map_handler/lake_relations/added_lakes.txt
+++ /dev/null
@@ -1 +0,0 @@
-mjosa
\ No newline at end of file
diff --git a/server/map_handler/lake_relations/all_lake_names.json b/server/map_handler/lake_relations/all_lake_names.json
new file mode 100644
index 00000000..33022b5c
--- /dev/null
+++ b/server/map_handler/lake_relations/all_lake_names.json
@@ -0,0 +1,10 @@
+[
+    "Mjøsa",
+    "Bogstadsvannet",
+    "Einavatnet",
+    "Femsjøen",
+    "Femunden",
+    "Fjellsjøen",
+    "Gjende",
+    "Gjersjøen"
+]
diff --git a/server/map_handler/lake_relations/all_system_relations.txt b/server/map_handler/lake_relations/all_system_relations.txt
deleted file mode 100644
index 6a20bd4a..00000000
--- a/server/map_handler/lake_relations/all_system_relations.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Mjøsa - mjosa
-TestLake - testlake
\ No newline at end of file
diff --git a/server/map_handler/process_lake.py b/server/map_handler/process_lake.py
index dd0d202b..bf14018a 100644
--- a/server/map_handler/process_lake.py
+++ b/server/map_handler/process_lake.py
@@ -34,79 +34,4 @@ def get_ids_and_centers(file_name): # NB buggy
                 'sub_div_center': sub_div_center
             }
             subdivisions.append(subdivision)
-    return subdivisions
-
-# Create groups creates polygons which consist of groupings of related subdivisions
-def create_groups(relation_file: str, data: list):
-    try:
-        print("Creating groups...")
-
-        # Read lake relation from json file
-        geo_data = gpd.read_file("server/lake_relations/" + relation_file + "_div.json")
-        relation_data = geo_data[geo_data['geometry'].geom_type == 'Polygon']
-
-        # Loop through each measurement and create groupings of subdivisions
-        for measurement in data:
-            subdiv_list = []
-
-            for subdivision in measurement['Subdivisions']:
-                subDivID = str(subdivision['SubdivID']) # Convert to string to match format in feature
-                group_id = subdivision['GroupID'] # Extract group ID
-
-                # Find the matching subdivision in relation_data
-                for index, feature in relation_data.iterrows():
-                    # Add the new group ID to the correct subdivision
-                    if feature['sub_div_id'] == subDivID:
-                        subdiv_list.append((group_id, Polygon(feature['geometry'].exterior.coords)))
-                        # subdiv_list.append((group_id, Polygon([feature['geometry']])))
-
-            # Sort subdiv_list based on group_ids
-            sorted_list = sorted(subdiv_list, key=lambda x: x[0])
-
-            current_group = -1 # Current group_id
-            new_shape = [] # List of subdivision geometries for current group
-
-            # Merge subdivisions in a given group
-            for element in sorted_list:
-                # If the subdivision still belongs to the current group
-                if element[0] == current_group:
-                    new_shape.append(element[1])
-
-                # New group id is found
-                elif len(new_shape) > 1:
-                    # Merge all subdivisions for the previous group into a single shape
-                    merged_polygon = MultiPolygon(new_shape).buffer(0)
-
-                    # Convert to Polygon
-                    if isinstance(merged_polygon, MultiPolygon):
-                        merged_polygon = merged_polygon.convex_hull
-
-                    # Structure the new polygon
-                    merged_polygon_structure = {
-                        "type": "Feature",
-                        "properties": {
-                            "group_id": current_group,
-                        },
-                        "geometry": {
-                            "type": "Polygon",
-                            "coordinates": [list(merged_polygon.exterior.coords)]
-                        }
-                    }
-
-                    # Append new polygon to relation data
-                    relation_data = relation_data.append(merged_polygon_structure, ignore_index=True)
-
-                # Update current group to new group_id and reset new_shape for next group
-                current_group = element[0]
-                new_shape = [element[1]]
-
-        # Convert GeoDataFrame to JSON
-        relation_data_json = json.loads(relation_data.to_json())
-
-        # Write relation with group shapes to file
-        write_json_to_file("server/lake_relations", "mjosa", relation_data_json)
-
-    except Exception as e:
-        print(f"Error in create_groups(): {e}")
-
-
+    return subdivisions
\ No newline at end of file
--
GitLab
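
A quick way to exercise the new endpoint is a small Python client. This is a minimal sketch, not part of the patch: the host and port (https://localhost:8443) and the self-signed development certificate are assumptions, and certificate verification is disabled here for local testing only.

import json
import ssl
import urllib.request

# Accept a self-signed development certificate (assumption; never do this
# against a production server).
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE

# Fetch the lake names served from all_lake_names.json.
with urllib.request.urlopen('https://localhost:8443/get_lake_names', context=ctx) as resp:
    lake_names = json.loads(resp.read().decode('utf-8'))

print(lake_names)  # e.g. ['Mjøsa', 'Bogstadsvannet', ...]

On the app side, the decoded list is what would be assigned to lakeSearchOptions in consts.dart, so the fuzzy search in CustomSearchDelegate matches against server-provided names instead of the removed hard-coded searchItems list.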