diff --git a/server/data_processing/process_lidar_data.py b/server/data_processing/process_lidar_data.py index 46162b5a0db5bc9543e95b9332031865ad49a5f0..97f0e1bde6b7bb87690b739b0a467ad53d5db1ed 100644 --- a/server/data_processing/process_lidar_data.py +++ b/server/data_processing/process_lidar_data.py @@ -9,22 +9,24 @@ from server.data_processing.area_processing import calculate_corners, define_gri lazData_path = ["server/example_lidar_data/ot_N_000005_1.laz", "server/example_lidar_data/ot_N_000033_1.laz"] # Info about data -with laspy.open(lazData_path[0]) as fh: - # Print metadata properties - print("File Version:", fh.header.version) - print("Point Count:", fh.header.point_count) - print("Scale Factors:", fh.header.scale) - print("Offset:", fh.header.offset) - - print('Points from Header:', fh.header.point_count) - las = fh.read() - print(las) - print('Points from data:', len(las.points)) - ground_pts = las.classification == 2 - bins, counts = np.unique(las.return_number[ground_pts], return_counts=True) - print('Ground Point Return Number distribution:') - for r, c in zip(bins, counts): - print(' {}:{}'.format(r, c)) +def about_laz_file(): + with laspy.open(lazData_path[0]) as fh: + # Print metadata properties + print("File Version:", fh.header.version) + print("Point Count:", fh.header.point_count) + print("Scale Factors:", fh.header.scale) + print("Offset:", fh.header.offset) + + las = fh.read() + print(las) + print('Points from data:', len(las.points)) + ground_pts = las.classification == 2 + bins, counts = np.unique(las.return_number[ground_pts], return_counts=True) + print('Ground Point Return Number distribution:') + for r, c in zip(bins, counts): + print(' {}:{}'.format(r, c)) + + return [las.header.version, las.header.point_count, las.header.scale, las.header.offset] # check if lidar points is within range of the area selected def inArea(position, areaRange): diff --git a/server/map/__pycache__/get_markers.cpython-39.pyc 
b/server/map/__pycache__/get_markers.cpython-39.pyc index 153eff491a414be4d1f89f774ec15cc0bcc92577..9fc12b022571f0ea96d9af2b5b047407ea06b93b 100644 Binary files a/server/map/__pycache__/get_markers.cpython-39.pyc and b/server/map/__pycache__/get_markers.cpython-39.pyc differ diff --git a/server/map/__pycache__/get_relation.cpython-39.pyc b/server/map/__pycache__/get_relation.cpython-39.pyc index a9aa82b5f430dc0f6cbd1672148de6d7fae5e6d8..96abab2fe87409b078ffd10fb5cf0856ecce57c0 100644 Binary files a/server/map/__pycache__/get_relation.cpython-39.pyc and b/server/map/__pycache__/get_relation.cpython-39.pyc differ diff --git a/server/map/input_new_data.py b/server/map/input_new_data.py index 603a7a5a209dab0b4e2827d73cceeb4eaeeb03b5..d0a8c52b9b7a60c8de7b6d0b7a9cadf30900c3bf 100644 --- a/server/map/input_new_data.py +++ b/server/map/input_new_data.py @@ -1,6 +1,6 @@ import json from datetime import datetime -from server.data_processing.process_lidar_data import calculate_area_data +from server.data_processing.process_lidar_data import calculate_area_data, about_laz_file # input_new_Lidar_data send new data gathered from the lidar and send it to the database (from the drone, most likely) def input_new_Lidar_data(self, cursor, sensorId, bodyOfWater): @@ -11,13 +11,17 @@ def input_new_Lidar_data(self, cursor, sensorId, bodyOfWater): total_measurement_average = 0 # the total average of a measurement + # data about the file read from + about_laz = about_laz_file() + scale_factor = max(about_laz[2]) + # create a new measurement with the time the data is sent, sensor type, where # and an estimate of average thickness of ice on water body - cursor.execute(''' INSERT INTO Measurement( SensorID, TimeMeasured, WaterBodyName, WholeAverageThickness, CenterLat, CenterLon) VALUES (?,?,?,?,?,?); + cursor.execute(''' INSERT INTO Measurement( SensorID, TimeMeasured, WaterBodyName, WholeAverageThickness, CenterLat, CenterLon, ScaleFactor) VALUES (?,?,?,?,?,?,?); - ''', (sensorId, datetime.utcnow().replace(microsecond=0), bodyOfWater, 0, latitude, longitude)) + ''', (sensorId, datetime.utcnow().replace(microsecond=0), bodyOfWater, 0, latitude, longitude, scale_factor)) # auto generate new measurement id measurement_id = cursor.lastrowid