Commit 3ad18fe6 authored by Sara Savanovic Djordjevic

update: backtrack + new approach

parent 4a5debef
@@ -2,7 +2,8 @@ from flask import Flask
from http.server import HTTPServer, BaseHTTPRequestHandler
from consts import SSL_CERT_PATH, SSL_KEY_PATH, HOST, PORT
from map.get_measurements import get_all_markers
from map.add_lake import fetch_divided_map
from map.add_new_lake import cut_map
from map.get_lake import fetch_divided_map
from APIs.get_weather import get_weather
from map.input_new_data import input_new_Lidar_data
import ssl
@@ -40,7 +41,7 @@ class IceHTTP(BaseHTTPRequestHandler):
            get_all_markers(self, self.cursor, 'mjosa')  # Get all markers
            # NB: temporary hardcoded waterBodyName
        elif self.path == '/get_relation':
            fetch_divided_map(self, 'mjosa')  # NB temp hardcoded value
            cut_map(self, 'mjosa')  # NB temp hardcoded value

    def do_POST(self):
        if self.path == '/get_weather_data':
......
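For context, a minimal client-side sketch of the route wired up above. It assumes the server in main.py listens on the HOST and PORT imported from consts (their values are not shown in this diff) and serves HTTPS, since main.py imports ssl together with SSL_CERT_PATH and SSL_KEY_PATH; the host, port and TLS settings below are placeholders.

# Sketch only: calling the '/get_relation' route that now maps to cut_map.
# host and port are assumed values; the real ones come from consts.py.
import http.client
import ssl

host, port = "localhost", 8443

context = ssl.create_default_context()
context.check_hostname = False          # dev-only: accept a self-signed certificate
context.verify_mode = ssl.CERT_NONE

conn = http.client.HTTPSConnection(host, port, context=context)
conn.request("GET", "/get_relation")
response = conn.getresponse()
print(response.status)                  # 200 once the handler has finished
body = response.read()                  # JSON body produced by the handler
conn.close()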
File added
No preview for this file type
@@ -69,10 +69,7 @@ def cut_map(self, body_of_water: str):  # NB: implement body_of_water
            'type': 'Feature',
            'properties': {
                'sub_div_id': str(sub_div_id),
                'group_id': '',  # Initialised empty, will be set upon requesting the relation
                'measurement_id': '',
                'sub_div_center': center,
            },
            'geometry': rounded_tile.__geo_interface__
        }
@@ -85,7 +82,7 @@ def cut_map(self, body_of_water: str):  # NB: implement body_of_water
        'tile_count': sub_div_id,  # Add the last subdivision ID as number of tiles
    }

    # write_json_to_file("server/lake_relations", "mjosa", feature_collection)
    write_json_to_file("server/lake_relations", "mjosa", feature_collection)

    self.send_response(200)
    self.send_header("Content-type", "application/json")
    self.end_headers()
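The file that cut_map writes here is the same mjosa_div.json that fetch_divided_map and get_id_and_center read back later in this commit. A small sketch of consuming it directly, assuming the standard GeoJSON FeatureCollection layout implied by the feature dicts above (the wrapper object itself sits outside the visible hunk):

# Sketch only: reading the relation file written by cut_map above.
# The path and the "mjosa" name mirror the hardcoded values in this commit.
import json

with open("server/lake_relations/mjosa_div.json", "r") as f:
    relation = json.load(f)

print(relation.get("tile_count"))        # last subdivision id, used as tile count
for feature in relation.get("features", []):
    props = feature["properties"]
    print(props["sub_div_id"], props["sub_div_center"])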
@@ -141,35 +138,3 @@ def write_json_to_file(path: str, file_name: str, json_data: dict):
    with open(path + '/' + file_name + '_div.json', 'w') as f:
        json.dump(json_data, f)

def fetch_divided_map(self, file_name):
    self.send_response(200)
    self.send_header("Content-type", "application/json")
    self.end_headers()

    # Extract contents from JSON file
    with open("server/lake_relations/" + file_name + "_div.json", "r") as file:
        data = file.read()

    # Write contents of the JSON file to response
    self.wfile.write(data.encode('utf-8'))

# Returns a list of [(sub_div_id, sub_div_center)]
def get_id_and_center(file_name):  # NB buggy
    # Expected format: [(id, [x,y]), (id, [x,y])]
    geo_data = gpd.read_file("server/lake_relations/" + file_name + "_div.json")

    subdivisions = []
    for index, row in geo_data.iterrows():
        sub_div_id = row['sub_div_id']
        sub_div_center = row['sub_div_center']

        print("sub_div_id: ", sub_div_id)

        subdivision = {
            'sub_div_id': sub_div_id,
            'sub_div_center': sub_div_center
        }
        subdivisions.append(subdivision)

    return subdivisions
import geopandas as gpd
from shapely.geometry import Polygon, LineString, MultiLineString
from shapely.ops import linemerge, unary_union, polygonize
import json
from server.map.add_new_lake import write_json_to_file


# Writes contents of a map json file to the response
def fetch_divided_map(self, file_name):
    self.send_response(200)
    self.send_header("Content-type", "application/json")
    self.end_headers()

    # Extract contents from JSON file
    with open("server/lake_relations/" + file_name + "_div.json", "r") as file:
        data = file.read()

    # Write contents of the JSON file to response
    self.wfile.write(data.encode('utf-8'))
def create_groups(relation_file: str, data: list):
    # Read lake relation from json file
    geo_data = gpd.read_file("server/lake_relations/" + relation_file + "_div.json")
    relation_data = geo_data[geo_data['geometry'].geom_type == 'Polygon']

    new_polygons = []

    # Add group IDs to lake relation
    for measurement in data:
        subdiv_list = []
        for subdivision in measurement['Subdivisions']:
            subDivID = str(subdivision['SubdivID'])  # Convert to string to match format in feature
            group_id = subdivision['GroupID']  # Extract group ID

            # Find the matching subdivision in relation_data
            for index, feature in relation_data.iterrows():
                # Add the new group ID to the correct subdivision
                if feature['sub_div_id'] == subDivID:
                    subdiv_list.append((group_id, Polygon([feature['coordinates']])))

        # Sort subdiv_list in ascending order of group_ids
        # While group_id is same -> merge all polygons and give each a group id
        # new_polygons.append((group_id, new_shape))

    # Put the coordinates and group id of each new polygon in the expected structure
    for polygon in new_polygons:
        '''
        Expected structure:
        {
            "type": "Feature",
            "properties": {
                "group_id": "0",
            },
            "geometry": {
                "type": "Polygon",
                "coordinates": [
                    [
                        [
                            10.4304,...
        '''

    # Convert GeoDataFrame to JSON
    relation_data_json = json.loads(relation_data.to_json())

    # Append the new data to the end of features of mjosa_div.json
    write_json_to_file("server/lake_relations", "mjosa", relation_data_json)
# Returns a list of [(sub_div_id, sub_div_center)]
def get_id_and_center(file_name):  # NB buggy
    # Expected format: [(id, [x,y]), (id, [x,y])]
    geo_data = gpd.read_file("server/lake_relations/" + file_name + "_div.json")

    subdivisions = []
    for index, row in geo_data.iterrows():
        sub_div_id = row['sub_div_id']
        sub_div_center = row['sub_div_center']

        print("sub_div_id: ", sub_div_id)

        subdivision = {
            'sub_div_id': sub_div_id,
            'sub_div_center': sub_div_center
        }
        subdivisions.append(subdivision)

    return subdivisions
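The grouping step inside create_groups above is still only sketched in comments. One possible way to finish it, as a sketch only: it assumes subdiv_list holds (group_id, Polygon) pairs as built in the inner loop, the helper name merge_groups and the use of itertools.groupby are illustrative, and unary_union is already imported at the top of this file.

# Sketch only: merging subdivision polygons that share a group id.
from itertools import groupby
from shapely.ops import unary_union


def merge_groups(subdiv_list):
    # Sort subdiv_list in ascending order of group_ids
    subdiv_list = sorted(subdiv_list, key=lambda pair: pair[0])

    new_polygons = []
    # While group_id is the same, dissolve all polygons of that group into one shape
    for group_id, pairs in groupby(subdiv_list, key=lambda pair: pair[0]):
        new_shape = unary_union([polygon for _, polygon in pairs])
        new_polygons.append((group_id, new_shape))
    return new_polygons

Each resulting (group_id, new_shape) pair could then be serialised via new_shape.__geo_interface__ into the "Expected structure" shown in the docstring, mirroring how cut_map builds its features.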
@@ -2,7 +2,7 @@ import json
from datetime import datetime
import random
import geopandas as gpd
from server.map.add_lake import write_json_to_file
from server.map.add_new_lake import write_json_to_file
# get_markers requests all marker data or valid markers, converts the data to json, and writes
......