Commit 77227932 authored by Sara Savanovic Djordjevic

update: try to read ice stats from json file, index error

parent 9c01cd54
1 merge request: !19 Clhp map, license agreement
@@ -12,3 +12,4 @@ SSL_CERT_PATH = CERT_DIR + "testCert.crt"
 # File paths
 MAP_HANDLER_PATH = "server/map_handler/"
 LAKE_RELATIONS_PATH = MAP_HANDLER_PATH + "lake_relations/"
+STATS_OUTPUT_PATH = "server/ModelFromNVE/outputs/plots/"
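The new STATS_OUTPUT_PATH constant is what the updated update_measurements.py below combines with a lake name to locate the pre-computed NVE ice statistics. A minimal sketch of that lookup, assuming the <lake_name>_sub_div.json naming used in this commit; stats_file_for is an illustrative helper, not part of the repository:

import os

STATS_OUTPUT_PATH = "server/ModelFromNVE/outputs/plots/"

def stats_file_for(lake_name: str) -> str:
    # Same path the commit builds inline as STATS_OUTPUT_PATH + lake_name + "_sub_div.json",
    # written with os.path.join so the trailing slash does not matter.
    return os.path.join(STATS_OUTPUT_PATH, lake_name + "_sub_div.json")

# Example (hypothetical lake name):
# stats_file_for("mjosa")  ->  "server/ModelFromNVE/outputs/plots/mjosa_sub_div.json"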
@@ -11,7 +11,7 @@ from server.scheduler import update_scheduler
 from server.consts import LAKE_RELATIONS_PATH
 from map_handler.get_lake_relation import get_map_data_handler
 from map_handler.input_new_data import input_new_Lidar_data
-from map_handler.update_measurements import update_measurements_handler, addTestData
+from map_handler.update_measurements import update_measurements_handler, add_test_data

 app = Flask(__name__)

 terminate_server = 0
@@ -124,7 +124,7 @@ class IceHTTP(BaseHTTPRequestHandler):
             lake_name_param = query_params.get('lake', [''])[0]
             lake_name = unquote(lake_name_param)  # Decode url param

-            addTestData(self, lake_name)
+            add_test_data(self, lake_name)

         elif self.path.startswith('/new_lidar_data'):
             parsed_path = urlparse(self.path)
...
@@ -3,8 +3,7 @@ import json
 import random
 from datetime import datetime

-from server.consts import LAKE_RELATIONS_PATH
-from server.ModelFromNVE.icemodellingscripts.getIceThicknessLakes import get_raw_dates, ice_prognosis_raw_data
+from server.consts import LAKE_RELATIONS_PATH, STATS_OUTPUT_PATH


 def update_measurements_handler(self, lake_name: str):
@@ -14,7 +13,7 @@ def update_measurements_handler(self, lake_name: str):
     self.send_header("Content-type", "application/json")
     self.end_headers()

-    self.wfile.write(measurement_data.encode('utf-8'))
+    self.wfile.write(measurement_data)


 def update_measurements(lake_name: str) -> (int, str):
@@ -31,18 +30,24 @@ def update_measurements(lake_name: str) -> (int, str):
             return 404, f"{lake_name} does not exists in the system"

         # Define file path to lidar data file
-        file_path = os.path.join(LAKE_RELATIONS_PATH, lake_name + '_lidar_data.json')
+        lidar_data_path = os.path.join(LAKE_RELATIONS_PATH, lake_name + '_lidar_data.json')

         # Lists to store processed data
         sub_div_ids = []
         measurements = []

         # Some lakes may not have any recent lidar data, so must check if the file exists
-        if os.path.exists(file_path):
+        if os.path.exists(lidar_data_path):
             # Read the newest lidar data from JSON file
-            with open(file_path, 'r') as file:
+            with open(lidar_data_path, 'r') as file:
                 lidar_data = json.load(file)

+            all_ice_stats = []
+            if os.path.exists(STATS_OUTPUT_PATH + lake_name + "_sub_div.json"):
+                # Try to read ice stats from the NVE model for the current lake
+                with open(STATS_OUTPUT_PATH + lake_name + "_sub_div.json", 'r') as file:
+                    all_ice_stats = json.load(file)

             # Iterate over all fetched rows
             for measurement in lidar_data:
                 processed_subdivs = []
@@ -68,19 +73,18 @@ def update_measurements(lake_name: str) -> (int, str):
                     center_lng = round(sub_division['CenLongitude'], 4)
                     avg_thickness = sub_division['AvgThickness']

-                    # Read from json file
-                    # lakName_sub_div.json
-                    # Add center coord txt file
-                    # Retrieve ice statistics for current subdivision
-                    ice_stats = get_raw_dates(ice_prognosis_raw_data(sub_div_id=subdiv_id, x=center_lat, y=center_lng))
-                    print("Fails here?")
+                    # Initialise list for the current ice stats
+                    ice_stats = []

                     # Ice statistics were retrieved successfully
-                    if len(ice_stats) > 0 and len(ice_stats[0]) > 0:
+                    if len(all_ice_stats) >= subdiv_id is not None or all_ice_stats[subdiv_id] != "Null":
+                        ice_stats = all_ice_stats[subdiv_id]
                         accuracy = 3
-                        print("Fails here, later?")

                         # Increase accuracy by 1 if the LiDar data and NVE data have a minimal discrepancy
-                        if abs(avg_thickness - ice_stats[0]['Total ice (m)']) < 1.0:
+                        if abs(avg_thickness - all_ice_stats[subdiv_id][3]['Black ice (m)']) < 1.0:
                             accuracy = 4
                     else:  # Failed to retrieve ice statistics, initialise empty ice stats object
                         ice_stats = {
@@ -117,7 +121,7 @@ def update_measurements(lake_name: str) -> (int, str):
                 measurements.append(new_measurement)

         # Populate remaining non-processed subdivisions and create "invalid" or "proxy" measurement to store them
-        remaining_sub_divs = fill_remaining_subdivisions(lake_name, sub_div_ids)
+        remaining_sub_divs = fill_remaining_subdivisions(lake_name, sub_div_ids, all_ice_stats)

         proxy = {
             'MeasurementID': -1,
             'TimeMeasured': str(datetime.now()),
@@ -135,14 +139,16 @@ def update_measurements(lake_name: str) -> (int, str):
         # Convert list of dictionaries to JSON
         response_data = json.dumps(measurements, indent=4)

-        # Set headers
-        # Return data
         return 200, response_data

     except Exception as e:
+        print(f"Error in updating measurements: {e}")
         return 500, f"Error in updating measurements: {e}".encode('utf-8')

-def fill_remaining_subdivisions(lake_name: str, processed_ids: list):
+def fill_remaining_subdivisions(lake_name: str, processed_ids: list, all_ice_stats):
     """
     Returns a list of subdivision dictionaries for subdivisions without measurements.
@@ -170,10 +176,11 @@ def fill_remaining_subdivisions(lake_name: str, processed_ids: list):
             center_lng = round(sub_div['properties']['sub_div_center'][1], 4)

             # Fetch weather data for each subdivision from the NVE model
-            ice_stats = get_raw_dates(ice_prognosis_raw_data(sub_div_id=sub_div_id, x=center_lat, y=center_lng))
+            ice_stats = []

-            if len(ice_stats) > 0 and len(ice_stats[0]) > 0:
-                total_ice_thickness = ice_stats[0]['Total ice (m)']
+            if len(all_ice_stats) >= sub_div_id and all_ice_stats[sub_div_id] != "Null":
+                ice_stats = all_ice_stats[sub_div_id]
+                total_ice_thickness = ice_stats[0]['Black ice (m)']
                 accuracy = 1
             else:  # Initialise empty ice stats
                 ice_stats = {
@@ -199,9 +206,7 @@ def fill_remaining_subdivisions(lake_name: str, processed_ids: list):
                 'CenLatitude': center_lat,
                 'CenLongitude': center_lng,
                 'Accuracy': accuracy,
-                # Calculate ice thickness based on total ice, temporary
-                # 'Color': calculateColor(ice_stats[0]['Total ice (m)']),
-                'Color': calculateColor(ice_stats[0]['Black ice (m)']),
+                'Color': calculateColor(total_ice_thickness),
                 'IceStats': ice_stats,
             }
             sub_divisions.append(sub_division)
@@ -227,7 +232,7 @@ def calculateColor(thickness: float):
     return 0  # Grey


-def addTestData(self, lake_name: str):
+def add_test_data(self, lake_name: str):
     """
     Adds random test data to lake_name_lidar_data.json. This function is purly for testing, not production.
     The function overwrites the lidar data for the selected lake.
...
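The commit message mentions an index error, and the new lookup above shows why: when all_ice_stats is shorter than subdiv_id, the left side of the or is False, so Python still evaluates all_ice_stats[subdiv_id] != "Null" and raises IndexError (the chained comparison len(all_ice_stats) >= subdiv_id is not None also reads as len(all_ice_stats) >= subdiv_id and subdiv_id is not None, not as a bounds check). A minimal sketch of a bounds-checked lookup, assuming the JSON list is indexed by subdivision id; get_subdiv_stats is a hypothetical helper, not code from this commit:

def get_subdiv_stats(all_ice_stats: list, subdiv_id: int):
    # Guard the index before touching the list, so a lake without model
    # output (or with fewer subdivisions) cannot raise IndexError.
    if 0 <= subdiv_id < len(all_ice_stats) and all_ice_stats[subdiv_id] != "Null":
        return all_ice_stats[subdiv_id]
    return None  # caller falls back to the empty ice_stats object

# Usage mirroring the loop in update_measurements():
# ice_stats = get_subdiv_stats(all_ice_stats, subdiv_id)
# accuracy = 3 if ice_stats else 1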
@@ -14,7 +14,7 @@ def update_all_measurements(lake_names: list):
     from_date = "2024-01-10"
     to_date = "2024-01-20"

-    update_data(from_date, to_date, lake_name=lake_names,
+    update_data(from_date, to_date, lake_name=lake_names,  # NB: make the call every day with False, every 4th day with True
                 sub_divs_folder = LAKE_RELATIONS_PATH + lake_names + '_centers.txt', update_all_bboxes=True)
     update_measurements_handler(None, lake)
...
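The NB comment added to the scheduler says the call should run every day with update_all_bboxes=False and only every fourth day with True, while the code still passes True on every run. One way to express that cadence, as a sketch only; should_update_all_bboxes is a hypothetical helper, not part of the commit:

from datetime import date

def should_update_all_bboxes(today: date) -> bool:
    # Refresh all bounding boxes only every fourth day; other days
    # keep the cheaper daily update.
    return today.toordinal() % 4 == 0

# update_data(from_date, to_date, lake_name=lake_names,
#             sub_divs_folder=LAKE_RELATIONS_PATH + lake_names + '_centers.txt',
#             update_all_bboxes=should_update_all_bboxes(date.today()))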