diff --git a/server/map/add_lake.py b/server/map/add_lake.py
index e8b715481408748f66adff914f4e5f596faf0959..a57087e4e5578f0890371950bd3fd886951f1e62 100644
--- a/server/map/add_lake.py
+++ b/server/map/add_lake.py
@@ -142,28 +142,3 @@ def write_json_to_file(path: str, file_name: str, json_data: dict):
     with open(path + '/' + file_name + '_div.json', 'w') as f:
         json.dump(json_data, f)
 
-
-def get_divided_map(file_name):
-    geo_data = gpd.read_file("server/map/" + file_name + ".geojson")
-    polygon_data = geo_data[geo_data['geometry'].geom_type == 'Polygon']
-    polygons = [Polygon(polygon.exterior) for polygon in polygon_data['geometry']]
-
-
-# Returns a list of [(sub_div_id, sub_div_center)]
-def get_id_and_center(file_name): # NB buggy
-    # Expected format: [(id, [x,y]), (id, [x,y])]
-    geo_data = gpd.read_file("server/lake_relations/" + file_name + "_div.json")
-    subdivisions = []
-    for index, row in geo_data.iterrows():
-        sub_div_id = row['sub_div_id']
-        sub_div_center = row['sub_div_center']
-
-        print("sub_div_id: ", sub_div_id)
-
-        subdivision = {
-            'sub_div_id': sub_div_id,
-            'sub_div_center': sub_div_center
-        }
-        subdivisions.append(subdivision)
-    return subdivisions
-
diff --git a/server/map/process_lake.py b/server/map/process_lake.py
index b2f7b85c24eb94025c791d348457914282a4df3d..6c250aace1d8d537ff1692486abdf2213c8b5f54 100644
--- a/server/map/process_lake.py
+++ b/server/map/process_lake.py
@@ -1,11 +1,7 @@
 import geopandas as gpd
-from shapely.geometry import Polygon, LineString, MultiLineString
-from shapely.ops import linemerge, unary_union, polygonize
-import matplotlib.pyplot as plt
-import random
-import math
+from shapely.geometry import Polygon, MultiPolygon, LineString, MultiLineString
 import json
-import os
+from server.map.add_lake import write_json_to_file
 
 
 # Writes contents of a map json file to the response
@@ -40,3 +36,77 @@ def get_ids_and_centers(file_name): # NB buggy
         subdivisions.append(subdivision)
     return subdivisions
 
+# create_groups builds polygons that represent groupings of related subdivisions
+def create_groups(relation_file: str, data: list):
+    try:
+        print("Creating groups...")
+
+        # Read lake relation from json file
+        geo_data = gpd.read_file("server/lake_relations/" + relation_file + "_div.json")
+        relation_data = geo_data[geo_data['geometry'].geom_type == 'Polygon']
+
+        # Loop through each measurement and create groupings of subdivisions
+        for measurement in data:
+            subdiv_list = []
+
+            for subdivision in measurement['Subdivisions']:
+                subDivID = str(subdivision['SubdivID'])  # Convert to string to match format in feature
+                group_id = subdivision['GroupID']  # Extract group ID
+
+                # Find the matching subdivision in relation_data
+                for index, feature in relation_data.iterrows():
+                    # Add the new group ID to the correct subdivision
+                    if feature['sub_div_id'] == subDivID:
+                        subdiv_list.append((group_id, Polygon(feature['geometry'].exterior.coords)))
+
+            # Sort subdiv_list based on group_ids
+            sorted_list = sorted(subdiv_list, key=lambda x: x[0])
+
+            current_group = -1  # Current group_id
+            new_shape = []      # List of subdivision geometries for current group
+
+            # Merge subdivisions in a given group
+            for element in sorted_list:
+                # If the subdivision still belongs to the current group
+                if element[0] == current_group:
+                    new_shape.append(element[1])
+
+                # New group id is found
+                else:
+                    if len(new_shape) > 1:
+                        # Merge all subdivisions for the previous group into a single shape
+                        merged_polygon = MultiPolygon(new_shape).buffer(0)
+
+                        # Convert to Polygon
+                        if isinstance(merged_polygon, MultiPolygon):
+                            merged_polygon = merged_polygon.convex_hull
+
+                        # Structure the new polygon
+                        merged_polygon_structure = {
+                            "type": "Feature",
+                            "properties": {
+                                "group_id": current_group,
+                            },
+                            "geometry": {
+                                "type": "Polygon",
+                                "coordinates": [list(merged_polygon.exterior.coords)]
+                            }
+                        }
+
+                        # Append new polygon to relation data
+                        relation_data = relation_data.append(merged_polygon_structure, ignore_index=True)
+
+                    # Update current group to new group_id and reset new_shape for next group
+                    current_group = element[0]
+                    new_shape = [element[1]]
+
+        # Convert GeoDataFrame to JSON
+        relation_data_json = json.loads(relation_data.to_json())
+
+        # Write relation with group shapes to file
+        write_json_to_file("server/lake_relations", "mjosa", relation_data_json)
+
+    except Exception as e:
+        print(f"Error in create_groups(): {e}")
+
+
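A minimal usage sketch for create_groups, assuming each measurement dict carries a 'Subdivisions' list whose entries have 'SubdivID' and 'GroupID' keys, as read above. The IDs and group numbers below are hypothetical, and server/lake_relations/mjosa_div.json must already exist with a 'sub_div_id' property on each feature.

    # Hypothetical sample input; subdivision IDs and group numbers are made up for illustration.
    from server.map.process_lake import create_groups

    measurements = [
        {
            'Subdivisions': [
                {'SubdivID': 1, 'GroupID': 0},
                {'SubdivID': 2, 'GroupID': 0},
                {'SubdivID': 3, 'GroupID': 1},
            ]
        },
    ]

    # Reads server/lake_relations/mjosa_div.json, appends a merged polygon per completed
    # group, and writes the updated relation back to the same file.
    create_groups("mjosa", measurements)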