Skip to content
Snippets Groups Projects
Commit 024502a8 authored by Sara Savanovic Djordjevic's avatar Sara Savanovic Djordjevic
Browse files

add: comments and docstrings to get_measurements.py

parent e2593c90
No related branches found
No related tags found
1 merge request!14Clhp map
No preview for this file type
No preview for this file type
......@@ -6,7 +6,16 @@ from server.ModelFromNVE.icemodellingscripts.getIceThicknessLakes import get_raw
def get_measurements(self, cursor, lake_name):
"""
Retrieves the measurement data from the database for a given lake, and adds weather data to each subdivision.
Parameters:
        self (BaseHTTPRequestHandler): An instance of BaseHTTPRequestHandler
        cursor (cursor): A sqlite3 cursor object that points to the database
lake_name (str): The name of the requested file/lake
"""
try:
# SQL query to fetch all measurements and subdivisions for the requested lake
sql_query = '''
SELECT m.MeasurementID, m.SensorID, m.TimeMeasured, m.CenterLat, m.CenterLon,
s.SensorType, s.Active,
......@@ -21,8 +30,8 @@ def get_measurements(self, cursor, lake_name):
WHERE b.Name = ?
'''
# Execute the query with the lake name as parameter
cursor.execute(sql_query, (lake_name,))
rows = cursor.fetchall()
# List of all fetched measurement objects
......@@ -36,7 +45,7 @@ def get_measurements(self, cursor, lake_name):
center_lat = row[12]
center_lng = row[13]
# Create subdivision new object
# Create new subdivision object
sub_division = {
'SubdivID': sub_div_id,
'GroupID': row[9],
......@@ -46,6 +55,7 @@ def get_measurements(self, cursor, lake_name):
'CenLongitude': center_lng,
'Accuracy': row[14],
'Color': calculateColor(row[11]), # NB color calculated based on average thickness, should be minimum
# Fetch weather data from the NVE model
'IceStats': get_raw_dates(ice_prognosis_raw_data(sub_div_id=sub_div_id, x=center_lat, y=center_lng))
}
sub_div_ids.append(sub_div_id)
......@@ -71,7 +81,7 @@ def get_measurements(self, cursor, lake_name):
'Subdivisions': [sub_division], # Array of sub_division objects
}
# Populate remaining subdivisions and create "invalid" measurement to store them
# Populate remaining subdivisions and create "invalid" or "proxy" measurement to store them
remaining_sub_divs = fill_remaining_subdivisions(lake_name, sub_div_ids)
measurement_data[-1] = {
'MeasurementID': -1,
......@@ -113,23 +123,36 @@ def get_measurements(self, cursor, lake_name):
self.wfile.write(marker_data.encode('utf-8'))
# Get data for subdivisions that have not been measured by sensors, and thus are not in the database
def fill_remaining_subdivisions(lake_name: str, sub_div_ids: list):
"""
Returns a list of subdivision dictionaries for subdivisions without measurements.
Parameters:
lake_name (str): The name of the requested file/lake
sub_div_ids (list): A list of ids (int) of all subdivisions that have already been processed
Returns:
sub_divisions (list): A list of subdivision dictionaries
"""
try:
# Read the lake relation for the requested lake
with open(LAKE_RELATIONS_PATH + lake_name + '_div.json', 'r') as file:
data = json.load(file)
relation = json.load(file)
relation = data
sub_divisions = []
# Loop through each feature and extract all subdivisions
for sub_div in relation['features']:
sub_div_id = int(sub_div['properties']['sub_div_id'])
# Only get subdivisions that are not in the list
if sub_div_id not in sub_div_ids:
center_lat = sub_div['properties']['sub_div_center'][0]
center_lng = sub_div['properties']['sub_div_center'][1]
# Fetch weather data for each subdivision from the NVE model
ice_stats = get_raw_dates(ice_prognosis_raw_data(sub_div_id=sub_div_id, x=center_lat, y=center_lng))
# Create new subdivision object
sub_division = {
'SubdivID': sub_div_id,
'GroupID': None,
......@@ -142,7 +165,6 @@ def fill_remaining_subdivisions(lake_name: str, sub_div_ids: list):
'Color': calculateColor(ice_stats[0]['Total ice (m)']),
'IceStats': ice_stats,
}
sub_divisions.append(sub_division)
return sub_divisions
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment