Commit 1a321207 authored by Joakim Aleksandersen

Merge branch 'Sentinenl_hub_integration'

# Conflicts:
#	server/ModelFromNVE/icemodellingscripts/getIceThicknessLakes.py
parents fbaffeb2 8c4446e1
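server/ModelFromNVE/icemodellingscripts/getIceThicknessLakes.py (diff):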
@@ -7,9 +7,10 @@ import server.ModelFromNVE.setenvironment as se
from server.ModelFromNVE.icemodelling import parameterization as dp, icethickness as it, weatherelement as we, \
    ice as ice
from server.ModelFromNVE.utilities import makeplots as pts, getgts as gts, getwsklima as gws
from server.Sentinelhub import box_funcitons, getAreaInfo, sentinel_config
-def ice_prognosis_raw_data(to_date=None, sub_div_id=0, x=10.709985478463118, y=60.810991171403316,
+def ice_prognosis_raw_data(to_date=None, sub_div_id=0, x=10.70, y=60.81,
                            altitude=0, awt=[], mf=0, icerun_dates=[]):
    """
@@ -88,7 +89,7 @@ def ice_prognosis_raw_data(to_date=None, sub_div_id=0, x=10.709985478463118, y=6
        air_temp_value.append(gridTemp[i].Value)

    if len(awt) > 0:
-        calculated_ice = None
+        calculated_ice = None  # TODO
    else:
        calculated_ice = it.calculate_ice_cover_air_temp(copy.deepcopy(first_ice), date, temp, sno, cloud_cover=cc,
                                                         icerun_dates=icerun_dates)
@@ -182,6 +183,18 @@ def get_raw_dates(data, from_date=None, to_date=None):
    return filtred_data
def format_dates(date_list):
    """ Converts a list of 'YYYY-MM-DD' strings to 'DD.MM.YYYY' format, skipping invalid dates. """
    formatted_dates = []
    for date_string in date_list:
        try:
            before = dt.datetime.strptime(date_string, '%Y-%m-%d')
            after = before.strftime('%d.%m.%Y')
            formatted_dates.append(after)
        except ValueError as e:
            print(f"Error processing date {date_string}: {e}")
    return formatted_dates
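For instance, the conversion behaves as follows (a quick sketch, not part of the diff; invalid entries print an error and are skipped):

    print(format_dates(["2024-01-10", "not-a-date"]))
    # prints an error note for "not-a-date" and returns ['10.01.2024']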
# changed to take a list of entries with the sub div id first, followed by the data: [sub_div_id, data]
def jsonify_data(data, name="temp", location=se.plot_folder):
    """
@@ -256,8 +269,41 @@ def get_subdiv_ids_n_cords(file_path):
    return id_list
def update_data(from_date=None, to_date=None, lake_name="skumsjoen",
                sub_divs_folder='../../map_handler/lake_relations/skumsjøen_centers.txt', update_all_bboxes=False):
    if update_all_bboxes:
        getAreaInfo.update_all_polynomials()

    sub_divs = get_subdiv_ids_n_cords(sub_divs_folder)
    filtered_data_for_dates = []

    # Iterate over each subdivision in the list
    for subdivision in sub_divs:
        sub_div_id = subdivision[0]
        x_coordinate = subdivision[1]
        y_coordinate = subdivision[2]

        closest_box, box_id = box_funcitons.get_closest_bbox_and_id(x_coordinate, y_coordinate)  # validate
        data = getAreaInfo.read_from_csv(f"bbox_sat_data/lake_index_{box_id}")
        icerundates = getAreaInfo.get_ice_run_dates(data)
        icerundates = format_dates(icerundates)

        # Fetch the raw data for the subdivision
        raw_data = ice_prognosis_raw_data(sub_div_id=sub_div_id, x=x_coordinate, y=y_coordinate,
                                          icerun_dates=icerundates)  # <- problem

        # Filter the raw data based on the specified date range
        filtered_data = get_raw_dates(raw_data, from_date, to_date)

        # Add the filtered data to the list
        filtered_data_for_dates.append((sub_div_id, filtered_data))

    jsonify_data_sub_div_ids(lake_name, filtered_data_for_dates, location=se.plot_folder)
    return
if __name__ == "__main__":
    '''
    data = ice_prognosis_raw_data()
    from_date = "2024-01-10"
@@ -268,38 +314,22 @@ if __name__ == "__main__":
    all_will_be_one = [(1, filtered_dates2), [2, filtered_dates]]
    jsonify_data_sub_div_ids(all_will_be_one)
    '''

    from_date = "2024-01-10"
    to_date = "2024-01-20"

    '''
    sub_divs = get_subdiv_ids_n_cords('../../map_handler/lake_relations/skumsjøen_centers.txt')  # location of the txt file

    # filtered_data_for_dates = [(i[0], get_raw_dates(ice_prognosis_raw_data(sub_div_id=i[0], x=i[1], y=i[2])), from_date, to_date) for i in sub_divs]
    filtered_data_for_dates = [(i[0], get_raw_dates(ice_prognosis_raw_data(sub_div_id=i[0], x=i[1], y=i[2]))) for i in sub_divs]
    jsonify_data_sub_div_ids("skumsjoen", filtered_data_for_dates, location=se.plot_folder)

    filtered_data_for_dates = []

    # Iterate over each subdivision in the list
    for subdivision in sub_divs:
        # Unpack the ID, x-coordinate, and y-coordinate for the subdivision
        sub_div_id = subdivision[0]
        x_coordinate = subdivision[1]
        y_coordinate = subdivision[2]

        # Fetch the raw data for the subdivision
        raw_data = ice_prognosis_raw_data(sub_div_id=sub_div_id, x=x_coordinate, y=y_coordinate)  # <- problem
        # filtered_data_for_dates = [(i[0], get_raw_dates(ice_prognosis_raw_data(sub_div_id=i[0], x=i[1], y=i[2])), from_date, to_date) for i in sub_divs]

        # Filter the raw data based on the specified date range
        filtered_data = get_raw_dates(raw_data, from_date, to_date)

        # without iceruns
        # filtered_data_for_dates1 = [(i[0], get_raw_dates(ice_prognosis_raw_data(sub_div_id=i[0], x=i[1], y=i[2]), from_date, to_date)) for i in sub_divs]

        # Add the filtered data to the list
        filtered_data_for_dates.append(filtered_data)
    '''

    update_data(from_date, to_date, lake_name="skumsjoen",
                sub_divs_folder='../../map_handler/lake_relations/skumsjøen_centers.txt', update_all_bboxes=True)

    print("hello world")
    # getAreaInfo.update_all_polynomials()
    # ice_prognosis()
    pass
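server/Sentinelhub/bbox_sat_data/lake_index_<id>.csv (sample statistics written by getAreaInfo.update_all_polynomials; <id> is the index of the bbox in box.json):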
interval_from,interval_to,default_B0_min,default_B0_max,default_B0_mean,default_B0_stDev,default_B0_sampleCount,default_B0_noDataCount,ice_condition
2023-09-12,2023-09-13,51.0,177.0,100.37,13.59,65536,0,No ice
2023-09-17,2023-09-18,146.0,234.0,186.65,10.61,65536,0,No ice
2023-09-24,2023-09-25,151.0,218.0,184.16,8.01,65536,0,No ice
2023-10-02,2023-10-03,133.0,202.0,161.18,7.64,65536,0,No ice
2023-10-04,2023-10-05,89.0,184.0,153.65,7.8,65536,0,No ice
2023-10-12,2023-10-13,151.0,233.0,186.25,10.33,65536,0,No ice
2023-10-17,2023-10-18,42.0,255.0,117.26,31.39,65536,0,No ice
2023-10-19,2023-10-20,137.0,211.0,169.23,8.01,65536,0,No ice
2023-11-18,2023-11-19,148.0,255.0,195.61,13.44,65536,0,No ice
2023-12-03,2023-12-04,88.0,234.0,176.98,16.47,65536,0,No ice
2023-12-23,2023-12-24,95.0,115.0,104.81,2.71,65536,0,Ice
2024-01-05,2024-01-06,106.0,121.0,112.41,1.43,65536,0,Ice
2024-01-25,2024-01-26,115.0,124.0,119.46,1.29,65536,0,Ice
2024-02-01,2024-02-02,114.0,135.0,118.41,1.4,65536,0,Ice
2024-02-09,2024-02-10,115.0,124.0,119.96,1.15,65536,0,Ice
2024-02-21,2024-02-22,117.0,130.0,122.3,0.75,65536,0,Ice
2024-02-26,2024-02-27,124.0,236.0,131.93,8.24,65536,0,No ice
2024-03-07,2024-03-08,124.0,255.0,155.21,15.82,65536,0,No ice
2024-03-12,2024-03-13,117.0,132.0,122.41,1.67,65536,0,Ice
2024-04-01,2024-04-02,126.0,231.0,156.1,17.83,65536,0,No ice
2024-04-16,2024-04-17,117.0,168.0,134.57,5.88,65536,0,Ice
2024-04-21,2024-04-22,152.0,255.0,192.65,8.99,65536,0,No ice
2024-05-04,2024-05-05,115.0,157.0,133.92,5.15,65536,0,Ice
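server/Sentinelhub/box.json (the bounding boxes read by box_funcitons):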
{
    "type": "MultiPolygon",
    "coordinates": [
        [
            [
                [10.612106, 60.970096],
                [10.612106, 60.978597],
                [10.643692, 60.978597],
                [10.643692, 60.970096],
                [10.612106, 60.970096]
            ]
        ],
        [
            [
                [10.656395, 60.932069],
                [10.656395, 60.943416],
                [10.674591, 60.943416],
                [10.674591, 60.932069],
                [10.656395, 60.932069]
            ]
        ],
        [
            [
                [10.686951, 60.885138],
                [10.686951, 60.900009],
                [10.715446, 60.900009],
                [10.715446, 60.885138],
                [10.686951, 60.885138]
            ]
        ]
    ]
}
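server/Sentinelhub/box_funcitons.py: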
"""
TODO: skriv i readme om hovrdan legge til en bbox i box.json
"""
import json
import math
import os
from sentinelhub import (BBox, CRS)
def create_BBox(box):
    """ Creates a BBox for a box given in the box.json file """
    x_coords = [coord[0] for coord in box]
    y_coords = [coord[1] for coord in box]
    return BBox((min(x_coords), min(y_coords), max(x_coords), max(y_coords)), CRS.WGS84)
def get_all_box():
    """ Reads box.json and returns every box, reduced to its upper and lower xy corners (BBox), so that it can
    be used in the Sentinel area-info functions """
    # resolve box.json relative to this module so it is found regardless of the working directory
    dir_path = os.path.dirname(os.path.realpath(__file__))
    file_path = os.path.join(dir_path, 'box.json')

    with open(file_path, 'r') as file:
        data = json.load(file)

    box_list = []
    if data['type'] == 'MultiPolygon':
        for polygon in data['coordinates']:
            for box in polygon:
                box_list.append(box)
    return box_list
def get_all_bbox():
    """ Returns all BBoxes, index is equal to id """
    return [create_BBox(box) for box in get_all_box()]
def get_distance(box, point):
    """ Get the distance from a bbox to a point (0 if the point lies inside the box) """
    x_coords = [coord[0] for coord in box]
    y_coords = [coord[1] for coord in box]
    # clamp the point to the box in each axis; whatever offset remains is that axis' distance component
    dx = max(min(x_coords) - point[0], 0, point[0] - max(x_coords))
    dy = max(min(y_coords) - point[1], 0, point[1] - max(y_coords))
    return math.sqrt(dx**2 + dy**2)
def get_closest_bbox_and_id(lon, lat):
    """ Returns the BBox closest to the coordinates, together with its id """
    boxes = get_all_box()
    distances = [get_distance(box, (lon, lat)) for box in boxes]
    index = distances.index(min(distances))
    return create_BBox(boxes[index]), index
if __name__ == "__main__":
    test = get_closest_bbox_and_id(10.66, 60.95)
    print(test)
    pass
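Until the README section the TODO above asks for exists, here is a minimal sketch of how a new bbox could be appended to box.json programmatically (the helper add_box and its arguments are hypothetical, not part of the module; boxes are stored as closed 5-point rings, and a box's id is simply its index):

    def add_box(min_lon, min_lat, max_lon, max_lat):
        # hypothetical helper: append an axis-aligned rectangle to box.json as a closed
        # 5-point ring, matching the MultiPolygon layout that get_all_box() expects
        dir_path = os.path.dirname(os.path.realpath(__file__))
        file_path = os.path.join(dir_path, 'box.json')
        with open(file_path, 'r') as file:
            data = json.load(file)
        ring = [[min_lon, min_lat], [min_lon, max_lat], [max_lon, max_lat],
                [max_lon, min_lat], [min_lon, min_lat]]
        data['coordinates'].append([ring])  # one polygon with a single ring
        with open(file_path, 'w') as file:
            json.dump(data, file, indent=2)
        return len(data['coordinates']) - 1  # the id of the new box

server/Sentinelhub/evalscript.py: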
ndmi = """
//VERSION=3
function setup() {
    return {
        input: ["B08", "B11", "dataMask"],
        output: [{
            id: "default",
            bands: 4,  // matches the [R, G, B, alpha] array returned below
            sampleType: 'UINT8'
        }, {
            id: "dataMask",
            bands: 1,
            sampleType: 'UINT8'
        }]
    };
}

function evaluatePixel(samples) {
    let ndmi = calculateIndex(samples.B08, samples.B11); // (NIR - SWIR1) / (NIR + SWIR1)
    ndmi = normalizeIndex(ndmi); // Normalize to 0-1 scale
    return {
        default: [colorScale(ndmi), colorScale(ndmi), colorScale(ndmi), 255], // greyscale RGB representation using NDMI
        dataMask: [samples.dataMask]
    };
}

function calculateIndex(band1, band2) {
    return (band1 - band2) / (band1 + band2);
}

function normalizeIndex(value) {
    return (value + 1) / 2; // Transform from -1 to 1 range to 0 to 1
}

function colorScale(value) {
    return Math.round(value * 255); // Scale index values to 0-255 for RGB visualization
}
"""
true_color = """
//VERSION=3
function setup() {
    return {
        input: [{
            bands: ["B02", "B03", "B04"]
        }],
        output: {
            bands: 3
        }
    };
}

function evaluatePixel(sample) {
    return [sample.B04, sample.B03, sample.B02];
}
"""
# intentionally invalid evalscript ("SHOULD GIVE ERROR" is not valid JavaScript)
error = """
SHOULD GIVE ERROR
"""
lake_ice_index = """
//VERSION=3
function setup() {
    return {
        input: ["B04", "B08", "B11", "B12", "dataMask"],
        output: [{
            id: "default",
            bands: 1,
            sampleType: 'UINT8'
        }, {
            id: "dataMask",
            bands: 1,
            sampleType: 'UINT8'
        }]
    };
}

function evaluatePixel(sample) {
    var Red = sample.B04;
    var SWIR2 = sample.B12;
    var NIR = sample.B08;
    var SWIR1 = sample.B11;
    var contrast = 120;

    var result = ((Red + SWIR2) / (NIR + SWIR1)) * contrast;

    if (sample.dataMask === 1) {
        return {
            default: [result],
            dataMask: [1]
        };
    } else {
        return {
            default: [0],
            dataMask: [0] // Output 0 to indicate invalid data
        };
    }
}
"""
from server.Sentinelhub import sentinel_config, evalscript, box_funcitons
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import csv
import datetime as dt
import os
from sentinelhub import (
    CRS, BBox, DataCollection, SentinelHubRequest, SentinelHubStatistical, SHConfig,
    Geometry, bbox_to_dimensions, DownloadRequest, MimeType, MosaickingOrder, parse_time
)
# Helper function made by Sentinel Hub
def stats_to_df(stats_data):
    """Transform Statistical API response into a pandas.DataFrame"""
    df_data = []

    for single_data in stats_data["data"]:
        df_entry = {}
        is_valid_entry = True

        df_entry["interval_from"] = parse_time(single_data["interval"]["from"]).date()
        df_entry["interval_to"] = parse_time(single_data["interval"]["to"]).date()

        for output_name, output_data in single_data["outputs"].items():
            for band_name, band_values in output_data["bands"].items():
                band_stats = band_values["stats"]
                if band_stats["sampleCount"] == band_stats["noDataCount"]:
                    is_valid_entry = False
                    break

                for stat_name, value in band_stats.items():
                    col_name = f"{output_name}_{band_name}_{stat_name}"
                    if stat_name == "percentiles":
                        for perc, perc_val in value.items():
                            perc_col_name = f"{col_name}_{perc}"
                            df_entry[perc_col_name] = round(perc_val, 2)
                    elif isinstance(value, (int, float)):
                        if stat_name not in ["sampleCount", "noDataCount"]:
                            df_entry[col_name] = round(value, 2)
                        else:
                            df_entry[col_name] = value

        if is_valid_entry:
            df_data.append(df_entry)

    return pd.DataFrame(df_data)
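For reference, a minimal sketch of the Statistical API response shape that stats_to_df consumes (a sketch, not part of the file; the field layout follows the code above, the numbers are invented):

    example_response = {
        "data": [{
            "interval": {"from": "2024-02-21T00:00:00Z", "to": "2024-02-22T00:00:00Z"},
            "outputs": {
                "default": {
                    "bands": {
                        "B0": {"stats": {"min": 117.0, "max": 130.0, "mean": 122.3,
                                         "stDev": 0.75, "sampleCount": 65536, "noDataCount": 0}}
                    }
                }
            }
        }]
    }
    # yields one row with columns such as default_B0_mean and default_B0_stDev,
    # the same columns seen in the csv sample above
    print(stats_to_df(example_response).columns.tolist())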
def statistical_request_sentinel(config, evalscript, time_interval, maxcc, bbox):
    try:
        request = SentinelHubStatistical(
            aggregation=SentinelHubStatistical.aggregation(
                evalscript=evalscript,
                time_interval=time_interval,
                aggregation_interval="P1D"
            ),
            input_data=[
                SentinelHubStatistical.input_data(
                    data_collection=DataCollection.SENTINEL2_L1C,
                    maxcc=maxcc
                )
            ],
            bbox=bbox,
            config=config,
        )
        stats = request.get_data()
        dfs = [stats_to_df(polygon_stats) for polygon_stats in stats]
        return pd.concat(dfs)
    except Exception as e:
        print(f"Something is wrong with the request, error: {e}")
        return None
def classify_ice(data):
    """ Set a label per row based on the standard deviation: > 35 high probability of cloud, > 6.5 no ice,
    otherwise ice """
    ice_conditions = []

    # remove dates where the picture is cropped
    data = data[data['default_B0_noDataCount'] <= 1000]

    for st_dev in data['default_B0_stDev']:
        if st_dev > 35:
            ice_conditions.append('High probability of Cloud')
        elif st_dev > 6.5:
            ice_conditions.append('No ice')  # could also be cracked ice
        # elif st_dev > 9999:
        #     ice_conditions.append('Weak ice or cracked ice')
        else:
            ice_conditions.append('Ice')

    data['ice_condition'] = ice_conditions
    return data
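A quick illustration of the thresholds (a sketch, not part of the file; the column names mirror the csv this module writes):

    sample = pd.DataFrame({
        'default_B0_stDev': [31.39, 8.01, 1.29],
        'default_B0_noDataCount': [0, 0, 0],
    })
    print(classify_ice(sample)['ice_condition'].tolist())
    # -> ['No ice', 'No ice', 'Ice']  (31.39 is still below the 35 cloud threshold)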
def save_data_to_csv(file, data):
    filename = f"{file}.csv"
    data.to_csv(filename, index=False)
def append_data_to_csv(file, data):
    """ Appends all data to an already existing csv if the format is the same; if the file does not exist, creates
    a new file using save_data_to_csv(file, data) """
    try:
        temp = pd.read_csv(f"{file}.csv")
        temp = pd.concat([temp, data], ignore_index=True)
        temp.to_csv(f"{file}.csv", index=False)
    except FileNotFoundError:
        save_data_to_csv(file, data)
    except Exception as e:
        print(f"An error occurred: {e}")
def get_last_date(data):
    """ Returns the date of the last picture in the data; if the data is missing or empty, returns the previous
    August 1st. """
    if data is not None and not data.empty:  # Check if 'data' is not None and not empty
        df_date = pd.DataFrame(data, columns=['interval_to']).tail(1)
        if not df_date.empty:
            return df_date['interval_to'].iloc[0]  # Return the last date

    # If 'data' is None or empty, calculate the previous August 1st
    today = dt.datetime.now()
    prev_august = today.year if today.month >= 8 else today.year - 1
    prev_august_date = dt.datetime(prev_august, 8, 1)
    return prev_august_date.strftime("%Y-%m-%d")
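For example, the fallback path behaves like this (a sketch; the printed value assumes the call is made between January and July 2024):

    print(get_last_date(None))  # -> '2023-08-01'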
def update_to_most_recent(file, BBox, evalscript=evalscript.lake_ice_index, maxcc=0.4):
    """ If the file exists, finds the most recent date in it and fetches all data from that date until now,
    appending the result to the file. If the file does not exist, creates a new one and puts all data into it. """
    try:
        data = pd.read_csv(f"{file}.csv")
        last_date = get_last_date(data)
        today = dt.datetime.now().strftime("%Y-%m-%d")

        data = statistical_request_sentinel(config=sentinel_config.config, evalscript=evalscript,
                                            time_interval=(last_date, today),
                                            maxcc=maxcc, bbox=BBox)
        if data is not None and data.empty is False:
            data = classify_ice(data)
            append_data_to_csv(file, data)
        return
    except FileNotFoundError:
        last_date = get_last_date(None)
        today = dt.datetime.now().strftime("%Y-%m-%d")

        data = statistical_request_sentinel(config=sentinel_config.config, evalscript=evalscript,
                                            time_interval=(last_date, today),
                                            maxcc=maxcc, bbox=BBox)
        if data is not None:
            data = classify_ice(data)
            save_data_to_csv(file, data)
        return
def read_from_csv(file):
    """ Reads from file.csv and returns the data, or None if the file does not exist """
    try:
        dir_path = os.path.dirname(os.path.realpath(__file__))
        file = os.path.join(dir_path, file)
        data = pd.read_csv(f"{file}.csv")
        return data
    except FileNotFoundError:
        return None
    except Exception as e:
        print(f"An error occurred: {e}")
def update_all_polynomials(file_name_format="bbox_sat_data/lake"):
    """
    For every bbox from box_funcitons, fetches the latest area info and creates or updates the csv whose
    index matches the id of the box in box.json.
    """
    all_bbox = box_funcitons.get_all_bbox()

    dir_path = os.path.dirname(os.path.realpath(__file__))
    file_name_format = os.path.join(dir_path, file_name_format)

    for i, box in enumerate(all_bbox):
        update_to_most_recent(f"{file_name_format}_index_{i}", box)
    return
def get_ice_run_dates(data):
    """ Get the dates for when the ice comes on and goes off. Interpolates the date between two dates where the
    ice_condition state changes; cloudy days next to ice do count as a change. """
    ice_run_dates = []

    # Iterate through the DataFrame to find condition changes
    for i in range(len(data) - 1):
        current_condition = data.iloc[i]['ice_condition']
        next_condition = data.iloc[i + 1]['ice_condition']

        # Skip cloud transitions around 'No ice' (clouds during fall), but treat clouds next to ice as a
        # possible break-up, since the cloud label can also be caused by cracked ice.
        if current_condition != next_condition and (current_condition, next_condition) not in \
                {('No ice', 'High probability of Cloud'), ("High probability of Cloud", "No ice")}:
            # Interpolate the date between the current interval_to and the next interval_from
            current_to = pd.to_datetime(data.iloc[i]['interval_to'])
            next_from = pd.to_datetime(data.iloc[i + 1]['interval_from'])
            midpoint_date = current_to + (next_from - current_to) / 2

            ice_run_dates.append(midpoint_date.strftime('%Y-%m-%d'))

    return ice_run_dates
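Using the sample csv above: the condition flips from 'No ice' (interval_to 2023-12-04) to 'Ice' (interval_from 2023-12-23), so the midpoint 2023-12-13 is recorded as an ice run date. A minimal check (a sketch, not part of the file):

    sample = pd.DataFrame({
        'interval_from': ['2023-12-03', '2023-12-23'],
        'interval_to': ['2023-12-04', '2023-12-24'],
        'ice_condition': ['No ice', 'Ice'],
    })
    print(get_ice_run_dates(sample))  # -> ['2023-12-13']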
if __name__ == "__main__":
    # update all the .csv files holding information about images taken from Sentinel Hub
    update_all_polynomials()

    # look up the closest box and its id for a given location
    lon, lat = 10.66, 60.95  # Mjøsa
    closest_box, box_id = box_funcitons.get_closest_bbox_and_id(lon, lat)

    data = read_from_csv(f"bbox_sat_data/lake_index_{box_id}")
    icerundates = get_ice_run_dates(data)
    print(icerundates)
    pass