diff --git a/server/lake_relations/added_lakes.txt b/server/lake_relations/added_lakes.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b10ac55a05d6891518a78a773134b3e89e79779b
--- /dev/null
+++ b/server/lake_relations/added_lakes.txt
@@ -0,0 +1 @@
+mjosa
\ No newline at end of file
diff --git a/server/map/__pycache__/add_new_lake.cpython-311.pyc b/server/map/__pycache__/add_new_lake.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7b81c96389419f16b137c0fae8b00356a829eb42
Binary files /dev/null and b/server/map/__pycache__/add_new_lake.cpython-311.pyc differ
diff --git a/server/map/__pycache__/get_lake.cpython-311.pyc b/server/map/__pycache__/get_lake.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6e6e9b2c952ab484ff4170150fbfbb4af8e8a462
Binary files /dev/null and b/server/map/__pycache__/get_lake.cpython-311.pyc differ
diff --git a/server/map/__pycache__/get_measurements.cpython-311.pyc b/server/map/__pycache__/get_measurements.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..553c75246fe5f79b0d08aed421fd6b9a539d05da
Binary files /dev/null and b/server/map/__pycache__/get_measurements.cpython-311.pyc differ
diff --git a/server/map/add_new_lake.py b/server/map/add_new_lake.py
new file mode 100644
index 0000000000000000000000000000000000000000..cae00de1573c0b2c3765e15b914433d8e2fa9ce6
--- /dev/null
+++ b/server/map/add_new_lake.py
@@ -0,0 +1,144 @@
import geopandas as gpd
from shapely.geometry import Polygon, LineString, MultiLineString
from shapely.ops import linemerge, unary_union, polygonize
import json
import os


# Read a lake relation from a GeoJson file, cut it into grid tiles, and write the result to the response object
def cut_map(self, body_of_water: str):  # NB: implement body_of_water
    # Read relation from GeoJson file and extract all polygons
    geo_data = gpd.read_file("server/lake_relations/mjosa.geojson")
    polygon_data = geo_data[geo_data['geometry'].geom_type == 'Polygon']
    polygons = [Polygon(polygon.exterior) for polygon in polygon_data['geometry']]

    if len(polygons) < 1:
        raise Exception("Failed to convert JSON object to Shapely Polygons")

    divided_map = []
    cell_width = 0
    cell_height = 0

    for polygon in polygons:
        cell_width = 0.04
        cell_height = 0.02  # NB could be calculated based on cell_width and distance from equator

        lines = create_grid(polygon, cell_width, cell_height)
        lines.append(polygon.boundary)
        lines = unary_union(lines)
        lines = linemerge(lines)
        lines = list(polygonize(lines))

        divided_map.extend(combine_grid_with_poly(polygon, lines))

    '''
    ####################### PLOTTING ############################
    # Requires: import matplotlib.pyplot as plt, import random
    tiles = [gpd.GeoDataFrame(geometry=[tile]) for tile in divided_map]

    print("Plotting... This may take some time...")
    # NB test plot
    fig, ax = plt.subplots()
    ax.set_aspect(1.5)

    # Plot each tile in a random color
    for tile in tiles:  # NB temporarily limited to 5 tiles
        random_color = "#{:06x}".format(random.randint(0, 0xFFFFFF))
        gpd.GeoSeries(tile.geometry).plot(ax=ax, facecolor=random_color, edgecolor='none')

    plt.show()
    ##################### PLOTTING END ###########################
    '''

    features = []

    sub_div_id = 0
    for tile in divided_map:
        # Skip any non-polygonal fragments produced by the clipping step
        if not isinstance(tile, Polygon):
            continue

        # Calculate tile center based on bounds, rounded to four decimals
        min_x, min_y, max_x, max_y = tile.bounds
        center = round((min_x + max_x) / 2, 4), round((min_y + max_y) / 2, 4)
        # center = round(tile.centroid.coords[0][0], 4), round(tile.centroid.coords[0][1], 4)

        rounded_coordinates = []
        for coords in tile.exterior.coords:
            rounded_coords = (round(coords[0], 4), round(coords[1], 4))
            rounded_coordinates.append(rounded_coords)
        rounded_tile = Polygon(rounded_coordinates)

        tile_feature = {
            'type': 'Feature',
            'properties': {
                'sub_div_id': str(sub_div_id),
                'sub_div_center': center,
            },
            'geometry': rounded_tile.__geo_interface__
        }
        features.append(tile_feature)
        sub_div_id += 1

    feature_collection = {
        'type': 'FeatureCollection',
        'tile_count': sub_div_id,  # Subdivision IDs run from 0 to tile_count - 1
        'tile_width': cell_width,
        'tile_height': cell_height,
        'features': features,
    }

    write_json_to_file("server/lake_relations", "mjosa", feature_collection)
    self.send_response(200)
    self.send_header("Content-type", "application/json")
    self.end_headers()

    self.wfile.write(json.dumps(feature_collection).encode('utf-8'))
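

# A minimal sketch of the slicing pipeline used by cut_map() above, run on a toy
# 1 x 1 square instead of a lake relation. The square and the 0.5-degree cells are
# invented values for illustration only; nothing here runs in the request flow.
def _grid_demo():
    toy_lake = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])

    # Same steps as cut_map(): grid lines + outline -> merged linework -> tiles
    lines = create_grid(toy_lake, cell_width=0.5, cell_height=0.5)
    lines.append(toy_lake.boundary)
    tiles = list(polygonize(linemerge(unary_union(lines))))

    print(len(tiles))  # 4 tiles: the square is cut into four 0.5 x 0.5 cells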


def create_grid(poly: Polygon, cell_width, cell_height):
    # Retrieve bounds of the entire polygon
    min_x, min_y, max_x, max_y = poly.bounds
    grid_lines = []

    # Horizontal lines
    y = min_y
    while y <= max_y:
        grid_lines.append(LineString([(min_x, y), (max_x, y)]))
        y += cell_height

    # Vertical lines
    x = min_x
    while x <= max_x:
        grid_lines.append(LineString([(x, min_y), (x, max_y)]))
        x += cell_width

    return grid_lines


def combine_grid_with_poly(polygon, grid):
    # Clip each grid tile against the lake polygon and keep the intersecting parts
    intersecting_tiles = []

    for tile in grid:
        if tile.intersects(polygon):
            intersection = tile.intersection(polygon)
            # A MultiLineString occurs when tile and polygon only share boundary segments
            if isinstance(intersection, MultiLineString):
                # Extend the intersecting tiles with the polygonized results
                intersecting_tiles.extend(list(polygonize(intersection)))
            else:
                intersecting_tiles.append(intersection)

    return intersecting_tiles


def write_json_to_file(path: str, file_name: str, json_data: dict):
    # NB add lake name to 'added_lakes.txt'
    print("Writing to file...")
    if not os.path.exists(path):
        raise Exception("Directory from path does not exist")

    with open(os.path.join(path, file_name + '_div.json'), 'w') as f:
        json.dump(json_data, f)
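

# Minimal sketch of what a single emitted tile feature looks like. The coordinates
# below are invented; only the dictionary layout mirrors what cut_map() writes.
if __name__ == '__main__':
    demo_tile = Polygon([(10.0, 60.0), (10.04, 60.0), (10.04, 60.02), (10.0, 60.02)])
    demo_feature = {
        'type': 'Feature',
        'properties': {
            'sub_div_id': '0',
            'sub_div_center': (10.02, 60.01),
        },
        # __geo_interface__ renders a shapely geometry as a GeoJSON geometry dict
        'geometry': demo_tile.__geo_interface__,
    }
    print(json.dumps(demo_feature, indent=2))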
diff --git a/server/map/get_lake.py b/server/map/get_lake.py
new file mode 100644
index 0000000000000000000000000000000000000000..a5c07d19910de3e1ad8aecca3ac8715d42b64c89
--- /dev/null
+++ b/server/map/get_lake.py
@@ -0,0 +1,110 @@
import geopandas as gpd
import pandas as pd
from shapely.geometry import Polygon, MultiPolygon
import json
from server.map.add_new_lake import write_json_to_file


# Writes the contents of a divided-map json file to the response
def fetch_divided_map(self, file_name):
    # Extract contents from the JSON file before sending headers, so a read
    # failure does not leave a half-written 200 response
    with open("server/lake_relations/" + file_name + "_div.json", "r") as file:
        data = file.read()

    self.send_response(200)
    self.send_header("Content-type", "application/json")
    self.end_headers()

    # Write contents of the JSON file to the response
    self.wfile.write(data.encode('utf-8'))


# create_groups builds polygons that represent groupings of related subdivisions
def create_groups(relation_file: str, data: list):
    try:
        print("Creating groups...")

        # Read lake relation from json file
        geo_data = gpd.read_file("server/lake_relations/" + relation_file + "_div.json")
        relation_data = geo_data[geo_data['geometry'].geom_type == 'Polygon']

        # Loop through each measurement and create groupings of subdivisions
        for measurement in data:
            subdiv_list = []

            for subdivision in measurement['Subdivisions']:
                sub_div_id = str(subdivision['SubdivID'])  # Convert to string to match format in feature
                group_id = subdivision['GroupID']  # Extract group ID

                # Find the matching subdivision in relation_data and keep its geometry
                for index, feature in relation_data.iterrows():
                    if feature['sub_div_id'] == sub_div_id:
                        subdiv_list.append((group_id, feature['geometry']))

            # Sort subdiv_list on group_id so each group forms a contiguous run
            sorted_list = sorted(subdiv_list, key=lambda x: x[0])

            group_rows = []  # New group polygons, appended to relation_data at the end

            def flush_group(group_id, shapes):
                # Merge all subdivisions of one group into a single shape
                if not shapes:
                    return
                merged_polygon = MultiPolygon(shapes).buffer(0)

                # Fall back to the convex hull if the parts do not dissolve into one polygon
                if isinstance(merged_polygon, MultiPolygon):
                    merged_polygon = merged_polygon.convex_hull

                group_rows.append({
                    'group_id': group_id,
                    'geometry': merged_polygon,
                })

            current_group = None  # Current group_id
            new_shape = []  # List of subdivision geometries for the current group

            # Merge the subdivisions of each group
            for group_id, geometry in sorted_list:
                # The subdivision still belongs to the current group
                if group_id == current_group:
                    new_shape.append(geometry)

                # A new group id was found: flush the previous group and start over
                else:
                    flush_group(current_group, new_shape)
                    current_group = group_id
                    new_shape = [geometry]

            flush_group(current_group, new_shape)  # Do not forget the final group

            # Append the new group polygons to the relation data
            # (DataFrame.append was removed in pandas 2.0, hence pd.concat)
            if group_rows:
                groups = gpd.GeoDataFrame(group_rows, geometry='geometry', crs=relation_data.crs)
                relation_data = pd.concat([relation_data, groups], ignore_index=True)

        # Convert GeoDataFrame to JSON
        relation_data_json = json.loads(relation_data.to_json())

        # Write relation with group shapes to file
        write_json_to_file("server/lake_relations", "mjosa", relation_data_json)

    except Exception as e:
        print(f"Error in create_groups(): {e}")


# Returns a list of {'sub_div_id': ..., 'sub_div_center': ...} dictionaries
def get_id_and_center(file_name):  # NB buggy
    geo_data = gpd.read_file("server/lake_relations/" + file_name + "_div.json")
    subdivisions = []
    for index, row in geo_data.iterrows():
        sub_div_id = row['sub_div_id']
        sub_div_center = row['sub_div_center']

        print("sub_div_id: ", sub_div_id)

        subdivisions.append({
            'sub_div_id': sub_div_id,
            'sub_div_center': sub_div_center
        })
    return subdivisions
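

# A sketch of an alternative to the manual merge loop in create_groups():
# geopandas' dissolve() unions all rows sharing a key into one geometry per key.
# This is not what the code above does today; the toy squares and the standalone
# 'group_id' column are illustrative only.
def _dissolve_demo():
    tiles = gpd.GeoDataFrame({
        'group_id': [1, 1, 2],
        'geometry': [
            Polygon([(0, 0), (1, 0), (1, 1), (0, 1)]),
            Polygon([(1, 0), (2, 0), (2, 1), (1, 1)]),
            Polygon([(3, 0), (4, 0), (4, 1), (3, 1)]),
        ],
    })

    merged = tiles.dissolve(by='group_id')  # one merged polygon per group_id
    print(merged.geometry.tolist())  # group 1 dissolves into a single 2 x 1 rectangle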
diff --git a/server/map/get_measurements.py b/server/map/get_measurements.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b42932481ebdf59e76975eb288ef3adedc6b4f9
--- /dev/null
+++ b/server/map/get_measurements.py
@@ -0,0 +1,178 @@
import json
from datetime import datetime
import random
import geopandas as gpd
from server.map.add_new_lake import write_json_to_file
from server.map.get_lake import create_groups
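

# Illustrative sketch of how get_all_markers() below folds flat SQL rows into
# nested measurement objects: rows that share a measurement id reuse the existing
# entry and only append their subdivision. The tuples are fabricated stand-ins
# for cursor.fetchall() output, trimmed to the two fields the demo needs.
def _grouping_demo():
    rows = [
        (1, 'subdiv A'),  # (MeasurementID, SubDivisionID)
        (1, 'subdiv B'),
        (2, 'subdiv C'),
    ]

    measurements = {}
    for measurement_id, subdivision in rows:
        if measurement_id in measurements:
            measurements[measurement_id]['Subdivisions'].append(subdivision)
        else:
            measurements[measurement_id] = {'MeasurementID': measurement_id,
                                            'Subdivisions': [subdivision]}

    print(list(measurements.values()))
    # [{'MeasurementID': 1, 'Subdivisions': ['subdiv A', 'subdiv B']},
    #  {'MeasurementID': 2, 'Subdivisions': ['subdiv C']}]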


# get_all_markers fetches all measurements and their subdivisions for a body of
# water, converts the data to json, and writes the data to the response object
def get_all_markers(self, cursor, waterBodyName):
    try:
        # NB: waterBodyName is currently unused; the query is hard-coded to 'Mjosa'
        sql_query = '''
            SELECT m.MeasurementID, m.SensorID, m.TimeMeasured, m.CenterLat, m.CenterLon,
                s.SensorType, s.Active,
                b.Name,
                d.SubDivisionID, d.GroupID, d.MinimumThickness,
                d.AverageThickness, d.CenterLatitude, d.CenterLongitude,
                d.Accuracy
            FROM Measurement m
                INNER JOIN Sensor s ON m.SensorID = s.SensorID
                INNER JOIN BodyOfWater b ON m.WaterBodyName = b.Name
                LEFT JOIN SubDivision d ON m.MeasurementID = d.MeasurementID
            WHERE b.Name = 'Mjosa'
        '''

        cursor.execute(sql_query)
        rows = cursor.fetchall()

        # Container for all fetched measurement objects
        measurement_data = {}

        # Iterate over all fetched rows
        for row in rows:
            measurement_id = row[0]

            # Create new subdivision object
            sub_division = {
                'SubdivID': row[8],
                'GroupID': row[9],
                'MinThickness': row[10],
                'AvgThickness': row[11],
                'CenLatitude': row[12],
                'CenLongitude': row[13],
                'Accuracy': row[14],
                'Color': calculateColor(row[11])  # NB color calculated based on average thickness, should be minimum
            }

            # Check if measurement ID already exists in measurement_data
            if measurement_id in measurement_data:
                # Append the subdivision to the measurement if it is not already there
                if sub_division not in measurement_data[measurement_id]['Subdivisions']:
                    measurement_data[measurement_id]['Subdivisions'].append(sub_division)

            else:
                # Create a new entry for measurement_id if it does not already exist
                measurement_data[measurement_id] = {
                    'MeasurementID': measurement_id,
                    'TimeMeasured': row[2],
                    'CenterLat': row[3],
                    'CenterLon': row[4],
                    'Sensor': {  # Each measurement only has one related sensor
                        'SensorID': row[1],
                        'SensorType': row[5],
                        'Active': bool(row[6])
                    },
                    'Subdivisions': [sub_division],  # Array of sub_division objects
                }

        ##################################### TEST DATA ###########################################
        # Temporary test data
        test_measurements = []
        subdiv_id = 17

        for i in range(3, 10):
            sub_divisions = []

            for j in range(0, 30):
                min_thickness = random.uniform(0, 10)
                avg_thickness = random.uniform(0, 15) + min_thickness

                subdivision = {
                    'SubdivID': subdiv_id,
                    'GroupID': 1,
                    'MinThickness': min_thickness,
                    'AvgThickness': avg_thickness,
                    'CenLatitude': 7.0,
                    'CenLongitude': 8.0,
                    'Accuracy': 1.0,
                    'Color': calculateColor(avg_thickness)
                }

                sub_divisions.append(subdivision)
                subdiv_id += 1

            measurement = {
                'MeasurementID': i,
                'TimeMeasured': str(datetime.now()),
                'CenterLat': 10.0,
                'CenterLon': 8.0,
                'Sensor': {
                    'SensorID': 1,
                    'SensorType': "test data",
                    'Active': True
                },
                'Subdivisions': sub_divisions
            }

            test_measurements.append(measurement)
        ##################################### TEST DATA ###########################################

        # Convert dictionary values to a list of measurements
        data = list(measurement_data.values()) + test_measurements

        # NB temporary placement
        # create_groups("mjosa", data)

        # Read lake relation from json file
        geo_data = gpd.read_file("server/lake_relations/mjosa_div.json")
        relation_data = geo_data[geo_data['geometry'].geom_type == 'Polygon']

        # Add group IDs to lake relation
        for measurement in data:
            measurement_id = str(measurement['MeasurementID'])  # Extract measurement ID
            for subdivision in measurement['Subdivisions']:
                subDivID = str(subdivision['SubdivID'])  # Convert to string to match format in feature
                group_id = subdivision['GroupID']  # Extract group ID
                new_group_id = measurement_id + "-" + str(group_id)  # Create concatenated group ID

                # Find the matching subdivision in relation_data
                for index, feature in relation_data.iterrows():
                    # Add the new group ID to the correct subdivision
                    if feature['sub_div_id'] == subDivID:
                        # Update group_id and measurement_id within the properties
                        relation_data.at[index, 'group_id'] = new_group_id
                        relation_data.at[index, 'measurement_id'] = measurement_id
                        # relation_data.at[index, 'sub_div_center'] = feature['sub_div_center']

        # Convert GeoDataFrame to JSON and update the json file
        relation_data_json = json.loads(relation_data.to_json())

        write_json_to_file("server/lake_relations", "mjosa", relation_data_json)

        if len(rows) == 0 or len(data) == 0:  # Return an empty list if no data is found
            print("No data matching the condition was found")
            marker_data = '[]'
        else:
            # Convert list of dictionaries to JSON
            marker_data = json.dumps(data, indent=4)

    except Exception as e:
        print(f"Error in get_all_markers(): {e}")
        marker_data = '[]'

    # Set headers
    self.send_response(200)
    self.send_header("Content-type", "application/json")
    self.end_headers()

    # Write the measurement data to the response object
    self.wfile.write(marker_data.encode('utf-8'))


def calculateColor(thickness: float):  # NB not final colors nor ranges
    if 0 < thickness <= 4:
        return 0xFFFF0000  # Red
    elif 4 < thickness <= 6:
        return 0xFFFF6A00  # Orange
    elif 6 < thickness <= 8:
        return 0xFFB1FF00  # Green
    elif thickness > 8:
        return 0xFF00D6FF  # Blue
    else:
        return 0xFF939393  # Gray
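

# Quick sketch of the thickness-to-color mapping above, with invented sample
# thicknesses. The return values appear to be ARGB integers (alpha, red, green,
# blue as two hex digits each), matching the 0xFF... literals in calculateColor().
if __name__ == '__main__':
    for thickness in (2.0, 5.0, 7.0, 9.0, 0.0):
        print(f"{thickness:>4} -> {calculateColor(thickness):#010x}")
    # 2.0 -> 0xffff0000 (red)
    # 5.0 -> 0xffff6a00 (orange)
    # 7.0 -> 0xffb1ff00 (green)
    # 9.0 -> 0xff00d6ff (blue)
    # 0.0 -> 0xff939393 (gray)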