diff --git a/server/map_handler/add_lake.py b/server/map_handler/add_lake.py
index 2d6b13120f878a7bd272644e6fef269ccd82fcad..80bdc2c3bd0bafa717267866f54a08403052c4ff 100644
--- a/server/map_handler/add_lake.py
+++ b/server/map_handler/add_lake.py
@@ -139,6 +139,7 @@ def write_json_to_file(path: str, file_name: str, json_data: dict):
     with open(path + '/' + file_name + '_div.json', 'w') as f:
         json.dump(json_data, f)
 
+<<<<<<< HEAD:server/map_handler/add_lake.py
 
 def get_divided_map(file_name):
     geo_data = gpd.read_file("server/map_handler/" + file_name + ".geojson")
@@ -164,3 +165,5 @@ def get_id_and_center(file_name): # NB buggy
         subdivisions.append(subdivision)
 
     return subdivisions
+=======
+>>>>>>> b0c6d9be39ffc9557a93873ab827bf05847ef1c1:server/map/add_lake.py
diff --git a/server/map_handler/process_lake.py b/server/map_handler/process_lake.py
index 8e1025919a4a9184698b580304d8c8d9e6e52c06..d018938571fa67ee33cadbf333956c064fb8b505 100644
--- a/server/map_handler/process_lake.py
+++ b/server/map_handler/process_lake.py
@@ -1,11 +1,7 @@
 import geopandas as gpd
-from shapely.geometry import Polygon, LineString, MultiLineString
-from shapely.ops import linemerge, unary_union, polygonize
-import matplotlib.pyplot as plt
-import random
-import math
+from shapely.geometry import Polygon, LineString, MultiLineString, MultiPolygon
 import json
-import os
+from server.map.add_lake import write_json_to_file
 
 
 # Writes contents of a map_handler json file to the response
@@ -40,3 +36,77 @@ def get_ids_and_centers(file_name): # NB buggy
         subdivisions.append(subdivision)
 
     return subdivisions
+# create_groups merges related subdivisions into a single polygon per group
+def create_groups(relation_file: str, data: list):
+    try:
+        print("Creating groups...")
+
+        # Read lake relation from json file
+        geo_data = gpd.read_file("server/lake_relations/" + relation_file + "_div.json")
+        relation_data = geo_data[geo_data['geometry'].geom_type == 'Polygon']
+
+        # Loop through each measurement and create groupings of subdivisions
+        for measurement in data:
+            subdiv_list = []
+
+            for subdivision in measurement['Subdivisions']:
+                subDivID = str(subdivision['SubdivID'])  # Convert to string to match format in feature
+                group_id = subdivision['GroupID']  # Extract group ID
+
+                # Find the matching subdivision in relation_data
+                for index, feature in relation_data.iterrows():
+                    # Add the new group ID to the correct subdivision
+                    if feature['sub_div_id'] == subDivID:
+                        subdiv_list.append((group_id, Polygon(feature['geometry'].exterior.coords)))
+                        # subdiv_list.append((group_id, Polygon([feature['geometry']])))
+
+            # Sort subdiv_list based on group_ids
+            sorted_list = sorted(subdiv_list, key=lambda x: x[0])
+
+            current_group = -1  # Current group_id
+            new_shape = []  # List of subdivision geometries for current group
+
+            # Merge subdivisions in a given group
+            for element in sorted_list:
+                # If the subdivision still belongs to the current group
+                if element[0] == current_group:
+                    new_shape.append(element[1])
+
+                # New group id is found
+                elif len(new_shape) > 1:
+                    # Merge all subdivisions for the previous group into a single shape
+                    merged_polygon = MultiPolygon(new_shape).buffer(0)
+
+                    # Convert to Polygon
+                    if isinstance(merged_polygon, MultiPolygon):
+                        merged_polygon = merged_polygon.convex_hull
+
+                    # Structure the new polygon
+                    merged_polygon_structure = {
+                        "type": "Feature",
+                        "properties": {
+                            "group_id": current_group,
+                        },
+                        "geometry": {
+                            "type": "Polygon",
+                            "coordinates": [list(merged_polygon.exterior.coords)]
+                        }
+                    }
+
+                    # Append new polygon to relation data
+                    relation_data = relation_data.append(merged_polygon_structure, ignore_index=True)
+
+                    # Update current group to new group_id and reset new_shape for next group
+                    current_group = element[0]
+                    new_shape = [element[1]]
+
+        # Convert GeoDataFrame to JSON
+        relation_data_json = json.loads(relation_data.to_json())
+
+        # Write relation with group shapes to file
+        write_json_to_file("server/lake_relations", "mjosa", relation_data_json)
+
+    except Exception as e:
+        print(f"Error in create_groups(): {e}")
+
+
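Below is a minimal usage sketch of the new create_groups() helper; it is not part of the patch. It assumes the server package is importable and that each measurement carries a Subdivisions list with SubdivID and GroupID fields, which is exactly what the function reads above; the IDs, the single measurement, and the "mjosa" relation name (matching the hard-coded output call) are illustrative only.

# Minimal usage sketch (not part of the patch). Assumes the "server" package is
# importable and that measurements carry SubdivID/GroupID as read by create_groups().
from server.map_handler.process_lake import create_groups

# Hypothetical measurement payload; only the fields create_groups() reads are shown.
data = [
    {
        "Subdivisions": [
            {"SubdivID": 1, "GroupID": 0},
            {"SubdivID": 2, "GroupID": 0},
            {"SubdivID": 3, "GroupID": 1},
            {"SubdivID": 4, "GroupID": 1},
        ]
    }
]

# Reads server/lake_relations/mjosa_div.json and writes the (re)grouped relation
# back out via write_json_to_file().
create_groups("mjosa", data)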