Commit 360aa196 authored by Joakim Aleksandersen

add: work towards making the icemodel more compatible with our system

parent 0b3093c9
@@ -228,9 +228,9 @@ def write_ice_info_for_points_based_on_date(cordinates_and_ids, date, altitude,
         y = i[1][1]
         cords = utm.from_latlon(y, x, 33)
-        temp = pl.run_newsite(fdate, ldate, 10, 1, forcing='grid', location_name=location_name,
+        temp = pl.run_newsite(fdate, ldate, 10, 1, forcing='grid', location_name=location_name + f"_sib_div_id_{i[0]}",
                               plot_folder=plot_folder, x=int(cords[0]), y=int(cords[1]), altitude=altitude,
-                              icerun_dates=icerun_dates, make_plots=False, melt_factor=mf, air_water_temp=awt)
+                              icerun_dates=icerun_dates, make_plots=True, melt_factor=mf, air_water_temp=awt)
+        ## handle plots ??? how iduno
         data.append([i[0], x, y, date, get_ice_thickness_given_date(date, temp[8], temp[9])])
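For context, a minimal sketch of what the new per-point naming above does, using the same point tuples that the updated __main__ block below passes in (illustration only, not part of the commit):

    location_name = "Mjøsa"
    points = [(1, [10.709985478463118, 60.810991171403316]), (2, [10.709985478463118, 60.850991171403316])]
    for sub_div_id, lonlat in points:
        # each sub division now gets its own location/plot name
        print(location_name + f"_sib_div_id_{sub_div_id}")  # "Mjøsa_sib_div_id_1", "Mjøsa_sib_div_id_2"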
@@ -258,7 +258,8 @@ if __name__ == "__main__":
     print(f"Total ice thiccness for date: {data[8][57]} is {get_icethickness_date_from_csv(data[8][57], location_name, se.plot_folder)} or",
           f"{get_ice_thickness_given_date(data[8][57], data[8], data[9])}")
-    temp = [(1, [10.709985478463118, 60.810991171403316]), (2, [10.709985478463118, 60.810991171403316])]
-    write_ice_info_for_points_based_on_date(temp, '2023-01-22', 123, 'Mjøsa')
+    temp = [(1, [10.709985478463118, 60.810991171403316]), (2, [10.709985478463118, 60.850991171403316])]
+    write_ice_info_for_points_based_on_date(temp, '2023-01-22', 123, 'Mjøsa', se.plot_folder)
     pass
\ No newline at end of file
import copy
import datetime as dt
import utm
import os
import json
import server.ModelFromNVE.setenvironment as se
from server.ModelFromNVE.icemodelling import parameterization as dp, icethickness as it, weatherelement as we, \
ice as ice
from server.ModelFromNVE.utilities import makeplots as pts, getgts as gts, getwsklima as gws
def ice_prognosis_raw_data(to_date=None, sub_div_id=0, met_stnr=0, x=10.709985478463118, y=60.810991171403316,
                           altitude=0, awt=[], mf=0, icerun_dates=[]):
    """
    Calculate an ice prognosis for one point from gridded weather data.

    :param to_date:         last date of the calculation (defaults to one week from today)
    :param sub_div_id:      sub division id, used as the location name in the ice column metadata
    :param met_stnr:        met station number (currently unused)
    :param x:               longitude
    :param y:               latitude
    :param altitude:        altitude of the location [m]
    :param awt:             air/water temperature data (water temperature driven runs are not implemented yet)
    :param mf:              melt factor
    :param icerun_dates:    dates where ice runs reset the ice cover
    :return:                list of dicts with daily ice, snow, cloud cover and temperature values
    """
    current_date = dt.datetime.now()
    if current_date.month < 10:
        from_date = dt.datetime(current_date.year - 1, 10, 1)
    else:
        from_date = dt.datetime(current_date.year, 10, 1)

    if to_date is None or to_date > dt.datetime.now():
        to_date = dt.datetime.now() + dt.timedelta(days=7)

    tyear, tmonth, tday = from_date.year, from_date.month, from_date.day
    if tmonth > 7 and tday < 7:
        tyear += 1

    first_ice = ice.IceColumn(from_date, [])
    first_ice.add_metadata('LocationName', sub_div_id)  # Using sub_div_id as the location name for metadata
    observed_ice = [first_ice]

    # Convert x and y from lon/lat to UTM zone 33 coordinates.
    cords = utm.from_latlon(y, x, 33)
    x, y = int(cords[0]), int(cords[1])

    gridTemp = gts.getgts(x, y, 'tm', from_date, to_date)
    gridSno = gts.getgts(x, y, 'sdfsw', from_date, to_date)
    gridSnoTot = gts.getgts(x, y, 'sd', from_date, to_date)

    # In steep terrain the grid altitude may differ from the wanted height. Adjust for that.
    gridTempNewElevation = we.adjust_temperature_to_new_altitude(gridTemp, altitude)

    temp, date = we.strip_metadata(gridTempNewElevation, get_date_times=True)
    sno = we.strip_metadata(gridSno)
    sno_tot = we.strip_metadata(gridSnoTot)
    cc = dp.clouds_from_precipitation(sno)

    air_temp_date = []
    air_temp_value = []
    for i in range(len(gridTemp)):
        air_temp_date.append(gridTemp[i].Date)
        air_temp_value.append(gridTemp[i].Value)

    if len(awt) > 0:
        # Water temperature driven calculation is not implemented yet.
        calculated_ice = None
    else:
        calculated_ice = it.calculate_ice_cover_air_temp(copy.deepcopy(first_ice), date, temp, sno, cloud_cover=cc,
                                                         icerun_dates=icerun_dates)

    # Cumulated amount of each ice type at a given date
    slush_ice = []
    black_ice = []
    total_ice = []
    dates = []

    for i in calculated_ice:
        black_ice_daily = 0
        slush_ice_daily = 0
        all_layer_ice_daily = 0
        date = i.date

        for j in i.column:
            ice_type = j.get_enum()
            ice_height = j.height
            if ice_type == 10:  # black ice
                black_ice_daily += ice_height
            elif ice_type == 11:  # slush ice
                slush_ice_daily += ice_height
            all_layer_ice_daily += ice_height

        dates.append(date)
        slush_ice.append(slush_ice_daily)
        black_ice.append(black_ice_daily)
        total_ice.append(all_layer_ice_daily)

    data = []
    for date, slush, black, total, snow2, sno_tot2, cc2, temp2 in zip(dates, slush_ice, black_ice, total_ice, sno,
                                                                      sno_tot, cc, temp):
        daily_data = {
            "Date": date.strftime("%Y-%m-%d"),
            "Slush ice (m)": slush,
            "Black ice (m)": black,
            "Total ice (m)": total,
            "Snow depth (m)": snow2,
            "Total snow (m)": sno_tot2,
            "Cloud cover": cc2,
            "Temperature (c)": temp2
        }
        data.append(daily_data)

    return data
    # return [sub_div_id, x, y, data]
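# Usage sketch (illustration only, assumes the GTS grid services are reachable): the function
# returns one dict per day, so the newest entries can be inspected like this:
#
#     daily = ice_prognosis_raw_data(x=10.709985478463118, y=60.810991171403316, altitude=123)
#     for entry in daily[-3:]:
#         print(entry["Date"], entry["Total ice (m)"], entry["Snow depth (m)"])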
def get_raw_dates(data, from_date=None, to_date=None):
    if from_date is None:
        from_date = (dt.datetime.now() - dt.timedelta(days=3)).strftime("%Y-%m-%d")
    if to_date is None:
        to_date = (dt.datetime.now() + dt.timedelta(days=4)).strftime("%Y-%m-%d")

    # ISO date strings ("YYYY-MM-DD") sort lexicographically, so plain string comparison
    # selects the correct date range.
    filtered_data = [entry for entry in data if from_date <= entry["Date"] <= to_date]
    return filtered_data
# TODO: change this to take a list where a sub div id comes first, followed by the data: [sub_div_id, data]
def jsonify_data(data, location=se.plot_folder):
    os.makedirs(location, exist_ok=True)
    file_path = os.path.join(location, "data_test_test1.json")

    try:
        with open(file_path, 'w') as json_file:
            json.dump(data, json_file, indent=4)
        print(f"Data successfully saved to {file_path}")
    except Exception as e:
        print(f"Failed to save data to JSON file. Error: {e}")
def jsonify_data_sub_div_ids(sub_div_and_data, location=se.plot_folder):
    # Build one dict keyed on sub division id: {sub_div_id: daily_data, ...}
    aggregated_data = {entry[0]: entry[1] for entry in sub_div_and_data}

    os.makedirs(location, exist_ok=True)
    filename = "aggregated_data_test_test.json"
    file_path = os.path.join(location, filename)

    try:
        with open(file_path, 'w') as json_file:
            json.dump(aggregated_data, json_file, indent=4)
        print(f"All data successfully saved to {file_path}")
    except Exception as e:
        print(f"Failed to save data to JSON file. Error: {e}")
def expose_data():
    # Not implemented yet.
    pass
if __name__ == "__main__":

    data = ice_prognosis_raw_data()

    from_date = "2024-01-10"
    to_date = "2024-01-20"

    filtered_dates = get_raw_dates(data, from_date, to_date)
    jsonify_data(filtered_dates)

    filtered_dates2 = get_raw_dates(data)
    all_will_be_one = [[1, filtered_dates2], [2, filtered_dates]]
    jsonify_data_sub_div_ids(all_will_be_one)

    # ice_prognosis()
    pass
\ No newline at end of file
@@ -448,7 +448,7 @@ if __name__ == "__main__":
     se.plot_folder += 'Stjørdal-2\\'

     # Open an output file for extremes
-    outf = open('{0}{1}_is_ekstremer.txt'.format(se.plot_folder, location_name), 'w')
+    # outf = open('{0}{1}_is_ekstremer.txt'.format(se.plot_folder, location_name), 'w')

     # Create csv-file to store solid ice data
     outf4 = open('{0}{1}_is_fast.csv'.format(se.plot_folder, location_name), 'w')