Skip to content
Snippets Groups Projects
Commit fef6b971 authored by Joakim Aleksandersen's avatar Joakim Aleksandersen
Browse files

Merge branch 'Sentinenl_hub_integration'

# Conflicts:
#	README.md
#	server/ModelFromNVE/icemodellingscripts/getIceThicknessLakes.py
parents a132deda 64913bad
No related branches found
No related tags found
No related merge requests found
......@@ -174,6 +174,16 @@ should be updated to contain the newly added lake name.
![img.png](images/resulting-files.png)
#### Adding an area for ice run dates
When adding a lake, a bounding box should also be added inside the ```/server/Sentinelhub/box.json``` file. To view and add new boxes, copy and paste the current boxes into sentinel-hub.com/eo-browser, then mark a new area. Make sure the area covers a sufficient area (at least 1 km^2).
![img.png](images/sentinelhub_adding_bboxes.png)
![img.png](images/selecting-bbox.png)
When updating the ice run dates, the corresponding files will be automatically created.
This is necessary to do when creating a new lake, but can also be done to add more boxes for large lakes.
### Add test data
```
Method: GET
......@@ -241,4 +251,5 @@ This software was developed by Joakim Aleksandersen, Sara Savanovic Djordjevic,
and Hoa Ben The Nguyen.
## Third-Party Software
ModelFromNVE is a third party software provided by the Norwegian Energy Regulatory Authority.
\ No newline at end of file
ModelFromNVE is a third party software provided by the Norwegian Energy Regulatory Authority.
images/selecting-bbox.png

30.5 KiB

images/sentinelhub_adding_bboxes.png

1.99 KiB

......@@ -64,11 +64,12 @@ def ice_prognosis_raw_data(to_date=None, sub_div_id=0, x=10.70, y=60.81,
observed_ice = [first_ice]
# make the x and y into utm 33 from lon lat
cords = utm.from_latlon(y, x, 33)
cords = utm.from_latlon(x, y, 33)
x, y = int(cords[0]), int(cords[1])
# check if utm is valid
x, y = validate_cords(x, y)
if validate_cords(x, y) is False:
return None
gridTemp = gts.getgts(x, y, 'tm', from_date, to_date)
gridSno = gts.getgts(x, y, 'sdfsw', from_date, to_date)
......@@ -151,17 +152,16 @@ def validate_cords(easting, northing):
northing:
Returns:
if the easting and northing is not acceptable at utm 33 returns middle of mjosa as new easting and northing values
if the easting and northing is not acceptable at utm 33 returns False else true
"""
default_x, default_y = 266707, 6749365
if not (100000 <= easting <= 900000) or not (0 <= northing <= 10000000):
easting, northing = default_x, default_y
return False
else:
print("cords are kept")
return True
return easting, northing
def get_raw_dates(data, from_date=None, to_date=None):
......@@ -174,6 +174,9 @@ def get_raw_dates(data, from_date=None, to_date=None):
Returns:
returns data for the specified time slot
"""
if data is None:
return []
if from_date is None:
from_date = (dt.datetime.now() - dt.timedelta(days=3)).strftime("%Y-%m-%d")
if to_date is None:
......@@ -212,7 +215,7 @@ def jsonify_data(data, name="temp", location=se.plot_folder):
file_path = os.path.join(location, f"data_{name}.json")
try:
with open(file_path, 'w') as json_file:
with open(file_path, 'w', encoding='utf-8') as json_file:
json.dump(data, json_file, indent=4)
print(f"Data successfully saved to {file_path}")
except Exception as e:
......@@ -239,7 +242,7 @@ def jsonify_data_sub_div_ids(lake_name, sub_div_and_data, location=se.plot_folde
file_path = os.path.join(location, filename)
try:
with open(file_path, 'w') as json_file:
with open(file_path, 'w', encoding='utf-8') as json_file:
json.dump(aggregated_data, json_file, indent=4)
print(f"All data successfully saved to {file_path}")
except Exception as e:
......@@ -260,7 +263,7 @@ def get_subdiv_ids_n_cords(file_path):
# reads file and gets all ids and cords at this format [(id, x, y), (id, x, y) ... ]
id_list = []
with open(file_path, 'r') as file:
with open(file_path, 'r', encoding='utf-8') as file:
for line in file:
data = line.strip().split(',')
if len(data) == 3:
......@@ -299,8 +302,12 @@ def update_data(from_date=None, to_date=None, lake_name="skumsjoen",
# Add the filtered data to the list
filtered_data_for_dates.append((sub_div_id, filtered_data))
filtered_data_for_dates.sort(key=lambda x: x[0])
jsonify_data_sub_div_ids(lake_name, filtered_data_for_dates, location=se.plot_folder)
return
return filtered_data_for_dates
if __name__ == "__main__":
'''
......@@ -321,13 +328,13 @@ if __name__ == "__main__":
from_date = "2024-01-10"
to_date = "2024-01-20"
update_data(from_date, to_date, lake_name="skumsjoen",
update_data(from_date, to_date, lake_name="skumsjøen",
sub_divs_folder='../../map_handler/lake_relations/skumsjøen_centers.txt', update_all_bboxes=True)
# filtered_data_for_dates = [(i[0], get_raw_dates(ice_prognosis_raw_data(sub_div_id=i[0], x=i[1], y=i[2])), from_date, to_date) for i in sub_divs ]
# without iceruns
# filtered_data_for_dates1 = [(i[0], get_raw_dates(ice_prognosis_raw_data(sub_div_id=i[0], x=i[1], y=i[2]), from_date, to_date)) for i in sub_divs ]
#filtered_data_for_dates1 = [(i[0], get_raw_dates(ice_prognosis_raw_data(sub_div_id=i[0], x=i[1], y=i[2]), from_date, to_date)) for i in sub_divs ]
#getAreaInfo.update_all_polynomials()
......
......@@ -349,7 +349,7 @@ def run_newsite(from_date, to_date, startmonth, startday, make_plots=True, plot_
# Save to array for check
# Open an output file
outf3 = open('{0}{1}_smelting.txt'.format(plot_folder, location_name), 'w')
outf3 = open('{0}{1}_smelting.txt'.format(plot_folder, location_name), 'w', encoding='utf-8')
outf3.write('maksdato ')
for i in range(len(temp)):
outf3.write(str(date[i]) + f'{temp[i]:6.2f}' + f'{(melt[i]/melt_factor):6.2f}' + '\n')
......@@ -451,9 +451,9 @@ if __name__ == "__main__":
# outf = open('{0}{1}_is_ekstremer.txt'.format(se.plot_folder, location_name), 'w')
# Create csv-file to store solid ice data
outf4 = open('{0}{1}_is_fast.csv'.format(se.plot_folder, location_name), 'w')
outf4 = open('{0}{1}_is_fast.csv'.format(se.plot_folder, location_name), 'w', encoding='utf-8')
# Create csv-file to store air temperature data
outf5 = open('{0}{1}_lufttemp.csv'.format(se.plot_folder, location_name), 'w')
outf5 = open('{0}{1}_lufttemp.csv'.format(se.plot_folder, location_name), 'w', encoding='utf-8')
# Overskrift
outf.write('Maksdato - Dato for største tykkelse av ett lag solid is\n')
......
......@@ -30,7 +30,7 @@ def unpickle_anything(file_name_and_path, print_message=True):
:return something_to_unpickle:
"""
something_to_unpickle = pickle.load( open(file_name_and_path, 'rb') )
something_to_unpickle = pickle.load( open(file_name_and_path, 'rb', encoding='utf-8') )
if print_message is True:
ml.log_and_print("[info] makepickle.py -> unpickle_anything: {0} unpickled.".format(file_name_and_path))
......
......@@ -25,7 +25,7 @@ def get_all_box():
dir_path = os.path.dirname(os.path.realpath(__file__))
file_path = os.path.join(dir_path, 'box.json')
with open(file_path, 'r') as file:
with open(file_path, 'r', encoding='utf-8') as file:
data = json.load(file)
box_list = []
......
......@@ -78,7 +78,7 @@ def statistical_request_sentinel(config, evalscript, time_interval, maxcc, bbox)
return None
def classify_ice(data):
""" set a label in data based on st deviation >11 cloud, >6 no ice, >2 some ice, <=2 ice"""
""" set a label in data based on st deviation"""
ice_conditions = []
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment