diff --git a/server/data_processing/__pycache__/process_lidar_data.cpython-311.pyc b/server/data_processing/__pycache__/process_lidar_data.cpython-311.pyc
index 29a63e52c3216522dfbadd87d89f7e180e850df4..4cceb079b85cd33d853adb8d5d15072b4530def9 100644
Binary files a/server/data_processing/__pycache__/process_lidar_data.cpython-311.pyc and b/server/data_processing/__pycache__/process_lidar_data.cpython-311.pyc differ
diff --git a/server/data_processing/process_lidar_data.py b/server/data_processing/process_lidar_data.py
index 46162b5a0db5bc9543e95b9332031865ad49a5f0..ee70948b260e49b46630263ebd274a78dbd9724a 100644
--- a/server/data_processing/process_lidar_data.py
+++ b/server/data_processing/process_lidar_data.py
@@ -10,21 +10,9 @@ lazData_path = ["server/example_lidar_data/ot_N_000005_1.laz", "server/example_l
 
 # Info about data
 with laspy.open(lazData_path[0]) as fh:
-    # Print metadata properties
-    print("File Version:", fh.header.version)
-    print("Point Count:", fh.header.point_count)
-    print("Scale Factors:", fh.header.scale)
-    print("Offset:", fh.header.offset)
-
-    print('Points from Header:', fh.header.point_count)
     las = fh.read()
-    print(las)
-    print('Points from data:', len(las.points))
     ground_pts = las.classification == 2
     bins, counts = np.unique(las.return_number[ground_pts], return_counts=True)
-    print('Ground Point Return Number distribution:')
-    for r, c in zip(bins, counts):
-        print(' {}:{}'.format(r, c))
 
 # check if lidar points is within range of the area selected
 def inArea(position, areaRange):
diff --git a/server/map/__pycache__/get_markers.cpython-311.pyc b/server/map/__pycache__/get_markers.cpython-311.pyc
index 9afbb6c07ee725a0171dfda4a7e56e20c3fe698a..52efde446f4acf064a28ea6968ca718795092912 100644
Binary files a/server/map/__pycache__/get_markers.cpython-311.pyc and b/server/map/__pycache__/get_markers.cpython-311.pyc differ
diff --git a/server/map/get_markers.py b/server/map/get_markers.py
index 00ead05e6d55fcf4c8d4c2c849508127aab83d2b..15c4b2332d2b16bc042514899d4a1cbc8f3a34de 100644
--- a/server/map/get_markers.py
+++ b/server/map/get_markers.py
@@ -1,4 +1,7 @@
 import json
+from datetime import datetime
+import random
+from random import randint
 
 # get_markers requests all marker data or valid markers, converts the data to json, and writes
 # the data to the response object
@@ -62,8 +65,50 @@ def get_all_markers(self, cursor, waterBodyName):
                     'Subdivisions': [sub_division], # Array of sub_division objects
                 }
 
+        ########################### TEST DATA ###########################################
+        # Temporary test data
+        test_measurements = []
+        subdiv_id = 17
+
+        for i in range(3, 10):
+            sub_divisions = []
+
+            for j in range(0, 30):
+                min_thickness = random.uniform(0, 10)
+                avg_thickness = random.uniform(0, 15) + min_thickness
+
+                subdivision = {
+                    'SubdivID': subdiv_id,
+                    'GroupID': 1,
+                    'MinThickness': min_thickness,
+                    'AvgThickness': avg_thickness,
+                    'CenLatitude': 7.0,
+                    'CenLongitude': 8.0,
+                    'Accuracy': 1.0,
+                    'Color': calculateColor(min_thickness)
+                }
+
+                sub_divisions.append(subdivision)
+                subdiv_id += 1
+
+            measurement = {
+                'MeasurementID': i,
+                'TimeMeasured': str(datetime.now()),
+                'CenterLat': 10.0,
+                'CenterLon': 8.0,
+                'Sensor': {
+                    'SensorID': 1,
+                    'SensorType': "test data",
+                    'Active': True
+                },
+                'Subdivisions': sub_divisions
+            }
+
+            test_measurements.append(measurement)
+        ########################### TEST DATA ###########################################
+
         # Convert dictionary values to list of measurements
-        data = list(measurement_data.values())
+        data = list(measurement_data.values()) + test_measurements
         if len(rows) == 0 or len(data) == 0:
             # Return 500 and empty list if no data is found
             print(f"No data which meets the condition found")
@@ -85,7 +130,7 @@ def get_all_markers(self, cursor, waterBodyName):
         self.wfile.write(marker_data.encode('utf-8'))
 
 
-def calculateColor(thickness: int): # NB not final colors nor ranges
+def calculateColor(thickness: float): # NB not final colors nor ranges
     if 0 < thickness <= 4:
         return 0xFFff0000 # Red
     elif 4 < thickness <= 6: