Commit 616f4a3c authored by Sara Savanovic Djordjevic

add: /get_all_lakes endpoint

parent 661b559d
1 merge request: !10 Clhp map
@@ -17,6 +17,8 @@ List<Measurement> selectedMarkerList = [];
 LatLng mapCenter = LatLng(60.8000, 10.8471);
 DateTime? lastUpdate; // Last time data was fetched from server
+List<String> lakeSearchOptions = []; // Init empty
+
 // Font settings
 const textColor = Colors.white;
 final appTitleStyle = GoogleFonts.chakraPetch(
...
 import 'package:fuzzy/fuzzy.dart';
 import 'package:flutter/material.dart';
+import '../consts.dart';

 typedef SearchResultCallback = void Function(String result);

 class CustomSearchDelegate extends SearchDelegate {
@@ -8,17 +10,6 @@ class CustomSearchDelegate extends SearchDelegate {
   CustomSearchDelegate(this.onResultSelected);
-  List<String> searchItems = [ // NB temp values
-    "Mjøsa",
-    "Bogstadsvannet",
-    "Einavatnet",
-    "Femsjøen",
-    "Femunden",
-    "Fjellsjøen",
-    "Gjende",
-    "Gjersjøen"
-  ];
   @override
   List<Widget> buildActions(BuildContext context) {
     return [
@@ -45,7 +36,7 @@ class CustomSearchDelegate extends SearchDelegate {
   Widget buildResults(BuildContext context) {
     List<String> searchResults = [];
     final options = FuzzyOptions(threshold: 0.4, findAllMatches: true);
-    final matcher = Fuzzy(searchItems, options: options);
+    final matcher = Fuzzy(lakeSearchOptions, options: options);
     final results = matcher.search(query);
     searchResults = results.map((result) => result.item as String).toList();
@@ -70,7 +61,7 @@ class CustomSearchDelegate extends SearchDelegate {
   Widget buildSuggestions(BuildContext context) {
     List<String> searchResults = [];
     final options = FuzzyOptions(threshold: 0.4, findAllMatches: true);
-    final matcher = Fuzzy(searchItems, options: options);
+    final matcher = Fuzzy(lakeSearchOptions, options: options);
     final results = matcher.search(query);
     searchResults = results.map((result) => result.item as String).toList();
...
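Both buildResults and buildSuggestions now rank the server-supplied lakeSearchOptions with the fuzzy package instead of the hardcoded list. For readers unfamiliar with fuzzy ranking, here is the same idea as a minimal Python sketch using the RapidFuzz library; RapidFuzz is an illustrative stand-in, not a dependency of this project:

```python
# Minimal sketch of fuzzy lake-name matching, analogous to the
# Fuzzy(lakeSearchOptions, ...) calls above. RapidFuzz is assumed
# for illustration only.
from rapidfuzz import fuzz, process, utils

lake_names = ["Mjøsa", "Bogstadsvannet", "Einavatnet", "Femsjøen",
              "Femunden", "Fjellsjøen", "Gjende", "Gjersjøen"]

def search_lakes(query: str, limit: int = 5) -> list[str]:
    # score_cutoff=60 is a rough analogue of the Dart threshold of 0.4:
    # it drops weak matches instead of returning the whole list.
    # (RapidFuzz scores run 0-100 with higher = better; the fuzzy
    # package's threshold runs the other way.)
    matches = process.extract(query, lake_names,
                              scorer=fuzz.WRatio,
                              processor=utils.default_process,
                              limit=limit, score_cutoff=60)
    return [name for name, score, index in matches]

print(search_lakes("mjos"))  # likely ['Mjøsa']
```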
@@ -7,6 +7,8 @@ from map_handler.process_lake import fetch_divided_map
 from map_handler.input_new_data import input_new_Lidar_data
 from urllib.parse import urlparse, parse_qs
 import ssl
+import json
 import sqlite3

 app = Flask(__name__)
@@ -38,7 +39,17 @@ class IceHTTP(BaseHTTPRequestHandler):
             self.end_headers()
             self.wfile.write(b"Root path hit!")
+        elif self.path == '/get_lake_names':
+            with open('server/map_handler/lake_relations/all_lake_names.json', 'r', encoding='utf-8') as file:
+                lake_names = json.load(file)
+
+            json_data = json.dumps(lake_names, ensure_ascii=False)
+
+            self.send_response(200)
+            self.send_header('Content-type', 'application/json; charset=utf-8')
+            self.end_headers()
+            # Encode as utf-8 (not iso-8859-1) so names like "Mjøsa" survive the round trip
+            self.wfile.write(json_data.encode('utf-8'))
         elif self.path.startswith('/update_map'):  # NB: should be POST?
             parsed_path = urlparse(self.path)
             query_params = parse_qs(parsed_path.query)
...
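A quick smoke test for the new endpoint needs only the standard library. The URL below is an assumption; host, port, and scheme depend on how IceHTTP is deployed (the ssl import above suggests HTTPS may be in use):

```python
# Hypothetical smoke test for /get_lake_names; the URL is a placeholder.
import json
import urllib.request

URL = "http://localhost:8080/get_lake_names"  # assumed host and port

with urllib.request.urlopen(URL) as response:
    assert response.status == 200
    # Decode with the charset the endpoint now declares
    lake_names = json.loads(response.read().decode("utf-8"))

print(lake_names)  # e.g. ["Mjøsa", "Bogstadsvannet", ...]
```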
mjosa
\ No newline at end of file
[
"Mjøsa",
"Bogstadsvannet",
"Einavatnet",
"Femsjøen",
"Femunden",
"Fjellsjøen",
"Gjende",
"Gjersjøen"
]
Mjøsa - mjosa
TestLake - testlake
\ No newline at end of file
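The relation file above pairs each display name with its on-disk slug ("Mjøsa - mjosa"). If the server ever needs to resolve a display name to a slug, a parser for this two-column format could look like the following; the helper and the example path are hypothetical, not part of this commit:

```python
# Hypothetical helper: parse the "DisplayName - slug" relation file shown above.
def load_lake_slugs(path: str) -> dict[str, str]:
    slugs = {}
    with open(path, encoding='utf-8') as file:
        for line in file:
            line = line.strip()
            if not line:
                continue  # skip blank lines
            display_name, _, slug = line.partition(' - ')
            slugs[display_name] = slug
    return slugs

# e.g. load_lake_slugs('server/map_handler/lake_relations/lake_slugs.txt')
# -> {'Mjøsa': 'mjosa', 'TestLake': 'testlake'}
```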
@@ -34,79 +34,4 @@ def get_ids_and_centers(file_name):  # NB buggy
             'sub_div_center': sub_div_center
         }
         subdivisions.append(subdivision)
     return subdivisions
\ No newline at end of file
-# create_groups creates polygons, each consisting of a grouping of related subdivisions
-def create_groups(relation_file: str, data: list):
-    try:
-        print("Creating groups...")
-
-        # Read lake relation from json file
-        geo_data = gpd.read_file("server/lake_relations/" + relation_file + "_div.json")
-        relation_data = geo_data[geo_data['geometry'].geom_type == 'Polygon']
-
-        # Loop through each measurement and create groupings of subdivisions
-        for measurement in data:
-            subdiv_list = []
-            for subdivision in measurement['Subdivisions']:
-                subDivID = str(subdivision['SubdivID'])  # Convert to string to match format in feature
-                group_id = subdivision['GroupID']  # Extract group ID
-
-                # Find the matching subdivision in relation_data
-                for index, feature in relation_data.iterrows():
-                    # Add the new group ID to the correct subdivision
-                    if feature['sub_div_id'] == subDivID:
-                        subdiv_list.append((group_id, Polygon(feature['geometry'].exterior.coords)))
-                        # subdiv_list.append((group_id, Polygon([feature['geometry']])))
-
-            # Sort subdiv_list based on group_ids
-            sorted_list = sorted(subdiv_list, key=lambda x: x[0])
-
-            current_group = -1  # Current group_id
-            new_shape = []  # List of subdivision geometries for current group
-
-            # Merge subdivisions in a given group
-            for element in sorted_list:
-                # If the subdivision still belongs to the current group
-                if element[0] == current_group:
-                    new_shape.append(element[1])
-                # New group id is found
-                elif len(new_shape) > 1:
-                    # Merge all subdivisions for the previous group into a single shape
-                    merged_polygon = MultiPolygon(new_shape).buffer(0)
-
-                    # Convert to Polygon
-                    if isinstance(merged_polygon, MultiPolygon):
-                        merged_polygon = merged_polygon.convex_hull
-
-                    # Structure the new polygon
-                    merged_polygon_structure = {
-                        "type": "Feature",
-                        "properties": {
-                            "group_id": current_group,
-                        },
-                        "geometry": {
-                            "type": "Polygon",
-                            "coordinates": [list(merged_polygon.exterior.coords)]
-                        }
-                    }
-
-                    # Append new polygon to relation data
-                    relation_data = relation_data.append(merged_polygon_structure, ignore_index=True)
-
-                # Update current group to new group_id and reset new_shape for next group
-                current_group = element[0]
-                new_shape = [element[1]]
-
-        # Convert GeoDataFrame to JSON
-        relation_data_json = json.loads(relation_data.to_json())
-
-        # Write relation with group shapes to file
-        write_json_to_file("server/lake_relations", "mjosa", relation_data_json)
-
-    except Exception as e:
-        print(f"Error in create_groups(): {e}")