Commit 0b3093c9 authored by Joakim Aleksandersen's avatar Joakim Aleksandersen

most of NVE's framework for season prognosis of ice on lakes

parent 35a11e12
Showing with 7054 additions and 0 deletions
{
"odata_version" : "v3.2.0",
"web_api_version" : "v5",
"forecast_api_version" : "v3.0.0",
"registration_basestring" : "http://www.regobs.no/Registration/",
"chart_server_basestring" : "http://h-web01.nve.no/chartserver/ShowData.aspx?req=getchart&ver=1.0&vfmt=json",
"frost_client_id" : "########-####-####-####-############",
"search_url" : "https://api.regobs.no/v5/Search",
"count_url" : "https://api.regobs.no/v5/Search/Count",
"kdv_url" : "https://api.regobs.no/v5/KdvElements?langkey=1&isActive=true&sortOrder=true"
}
{
"output" : "/output/",
"plots" : "/output/plots/",
"sesong_plots" : "/output/plots/sesong/",
"ni_dogn_plots" : "/output/plots/9dogn/",
"local_storage" : "/localstorage/",
"kdv_elements" : "/localstorage/",
"logs" : "/logs/",
"resources" : "/resources/",
"time_series_data" : "/resources/timeseriesdata/",
"data_sets" : "/resources/datasets/",
"logos" : "/resources/logos/",
"reference_lakes_input_json_file": "/resources/referansevann/referansevann.json",
"reference_lakes_output_json_folder" : "/output/plots/reference_lakes/",
"reference_lakes_plot_folder" : "/output/plots/reference_lakes/",
"reference_lakes_adjusted_location" : "/resources/referansevann/Adjusted_location_for_GTS.json"
}
from icemodellingscripts import plotlakes as pl
import server.ModelFromNVE.setenvironment as se  # <- all instances use se.plot_folder
import csv
import json
import utm
from datetime import datetime
def get_ice_thickness_given_date(date, modeldata8, modeldata9):
"""
This function finds the ice thickness for a given date by matching the date in modeldata8
and returning the corresponding value in modeldata9.
Parameters:
date: The specific date for which the ice thickness is needed.
modeldata8: A list of dates.
modeldata9: A list of ice thickness values corresponding to the dates in modeldata8.
Returns:
:Float - The ice thickness for the given date, or None if the date is not found.
"""
try:
# Find the index of the given date in modeldata8
num = modeldata8.index(date)
# Return the corresponding ice thickness from modeldata9
return modeldata9[num]
except ValueError:
# If the date is not found in modeldata8, return None
return None
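# Example: with dates = [datetime(2024, 1, 1), datetime(2024, 1, 2)] and ice = [0.10, 0.12],
# get_ice_thickness_given_date(datetime(2024, 1, 2), dates, ice) returns 0.12.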
# no need for this one
def get_icethickness_date_from_csv(date, location_name, plot_folder = se.plot_folder):
"""
Reads ice thickness data from a CSV file for a given date and location, then returns the ice thickness for that date.
Parameters:
date: The specific date for which the ice thickness is needed, as a datetime object.
location_name: The name of the location, used to identify the correct CSV file.
plot_folder: The folder where the CSV files are stored. Defaults to the plot_folder defined in the 'se' module.
Returns:
:Float - The ice thickness for the given date as a float, or None if the date is not found in the CSV.
"""
# Format the date as DD.MM.YY for matching with the dates in the CSV
date_str = date.strftime("%d.%m.%y")
# Construct the path to the CSV file for the given location
csv_path = f"{plot_folder}{location_name}_is_fast.csv"
try:
with open(csv_path, mode='r', encoding='utf-8') as csvfile:
reader = csv.reader(csvfile, delimiter=';')
for row in reader:
if row[0] == date_str:
return float(row[1].replace(',', '.'))
except FileNotFoundError:
print(f"No data file found for {location_name} inside: {plot_folder}.")
except Exception as e:
print(f"An error occurred: {e}")
# Return None if the date is not found or in case of an error
return None
# no need for this one
def plot_lakes_simplified(x, y, altitude, location_name, year, plot_folder = se.plot_folder, icerun_dates = []):
"""
Parameters:
x: UTM east, zone 33
y: UTM north, zone 33
altitude: altitude in m
location_name: name of the location
year: 2023 = end of 2023 + start of 2024
icerun_dates: dates when an ice run has cleared the river of ice - the ice cover is reset to ice free
Returns:
:modeldata - List of data for:
:modeldata[0] - date for thickest single layer
:modeldata[1] - thickest single layer
:modeldata[2] - slush_ice thickness in thickest single layer
:modeldata[3] - black_ice thickness in thickest single layer
:modeldata[4] - date for biggest sum of solid ice layers
:modeldata[5] - biggest sum of solid ice layers
:modeldata[6] - date for biggest sum of all ice layers (ice thickness)
:modeldata[7] - biggest sum of all ice layers (ice thickness)
:modeldata[8] - list of dates with ice calculations
:modeldata[9] - list of sum of solid ice at each ice calculation
:modeldata[10] - list of dates with input air temperatures
:modeldata[11] - list of input air temperatures
"""
####################################################################################################################
# Assumed water temperatures at given air temperatures.
# One array for each month, with copies at the ends.
# The data are therefore December, January, February, ..., December, January: 14 elements
# (awt[0] is December, awt[1] is January, ..., awt[13] is January again).
# Note that May-November are dummy data. This is outside the ice season in Numedalslågen.
awt = [[[-20, 0], [-15, 0], [-10, 0], [-5, 0.5], [0, 1.0], [5, 1.5], [10, 2.0], [15, 2.2], [20, 2.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.1], [0, 0.1], [5, 0.2], [10, 0.2], [15, 0.3], [20, 0.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.1], [0, 0.2], [5, 0.3], [10, 0.3], [15, 0.4], [20, 0.5]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.3], [0, 0.8], [5, 1.6], [10, 2.6], [15, 2.8], [20, 3.0]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.4], [0, 1.9], [5, 3.3], [10, 4.5], [15, 5.0], [20, 5.5]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.8], [0, 2.7], [5, 6.0], [10, 7.2], [15, 8.3], [20, 10.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.8], [0, 2.7], [5, 6.0], [10, 7.2], [15, 8.3], [20, 10.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.8], [0, 2.7], [5, 6.0], [10, 7.2], [15, 8.3], [20, 10.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.8], [0, 2.7], [5, 6.0], [10, 7.2], [15, 8.3], [20, 10.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.8], [0, 2.7], [5, 6.0], [10, 7.2], [15, 8.3], [20, 10.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.7], [0, 1.9], [5, 3.3], [10, 4.5], [15, 5.0], [20, 5.5]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.6], [0, 0.8], [5, 1.6], [10, 2.6], [15, 2.8], [20, 3.0]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.5], [0, 1.0], [5, 1.5], [10, 2.0], [15, 2.2], [20, 2.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.1], [0, 0.1], [5, 0.2], [10, 0.2], [15, 0.3], [20, 0.4]]]
mf = 0.04
"""
awt = []
mf = []
"""
####################################################################################################################
fdate = str(year) + '-10-01'
ldate = str(year + 1) + '-05-31'
return pl.run_newsite(fdate, ldate, 10, 1, forcing='grid', location_name=location_name,
plot_folder=plot_folder, x=x, y=y, altitude=altitude, icerun_dates=icerun_dates,
melt_factor=mf, air_water_temp=awt)
# no need for this one
def update_csv(location_name, dates_ice, sum_ice, dates_air, air_temp, folder=se.plot_folder):
    # Write solid ice data to a csv-file
    with open('{0}{1}_is_fast.csv'.format(folder, location_name), 'w') as outf4:
        for k in range(len(dates_ice)):
            outf4.write(dates_ice[k].strftime("%d.%m.%y") + ';' + f'{sum_ice[k]:6.2f}'.replace('.', ',') + '\n')
    # Write air temperature data to a csv-file
    with open('{0}{1}_lufttemp.csv'.format(folder, location_name), 'w') as outf5:
        for k in range(len(dates_air)):
            outf5.write(dates_air[k].strftime("%d.%m.%y") + ';' + f'{air_temp[k]:6.1f}'.replace('.', ',') + '\n')
# no need for this one
def update_json_timeseries(location_name, dates_ice, sum_ice, dates_air, air_temp, folder=se.plot_folder):
    # Distinct name: the three-argument update_json defined below would otherwise shadow this function.
# Prepare data for solid ice in JSON format
ice_data = [{"date": date.strftime("%d.%m.%y"), "sum_ice": f'{value:6.2f}'.replace('.', ',')}
for date, value in
zip(dates_ice, sum_ice)]
# Prepare data for air temperature in JSON format
air_data = [{"date": date.strftime("%d.%m.%y"), "air_temp": f'{value:6.1f}'.replace('.', ',')}
for date, value in
zip(dates_air, air_temp)]
# Save solid ice data as JSON
with open(f'{folder}{location_name}_is_fast.json', 'w') as outf4:
json.dump(ice_data, outf4, indent=4)
# Save air temperature data as JSON
with open(f'{folder}{location_name}_lufttemp.json', 'w') as outf5:
json.dump(air_data, outf5, indent=4)
def update_json(location_name, data, folder = se.plot_folder):
#temp = [{"id": data[0], "x":data[1], "y":data[2], "date": data[3].strftime("%d.%m.%y"), "total_ice":data[4]}
temp = [{"sub_div_id": str(i), "x": f"{x:.4f}", "y": f"{y:.4f}", "date": str(datetime.strptime(date, '%Y-%m-%d')), "total_ice": total_ice}
for i, x, y, date, total_ice in data]
with open(f'{folder}{location_name}_is_test.json', 'w') as outf4:
json.dump(temp, outf4, indent=4)
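# A hypothetical example of the JSON written above (values are made up for illustration):
# [
#     {
#         "sub_div_id": "1",
#         "x": "10.7100",
#         "y": "60.8110",
#         "date": "2023-01-22 00:00:00",
#         "total_ice": 0.42
#     },
#     ...
# ]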
def write_ice_info_for_points_based_on_date(coordinates_and_ids, date, altitude, location_name,
                                            plot_folder=se.plot_folder, icerun_dates=[]):
    """
    Args:
        coordinates_and_ids: [(id, [x, y]), (id, [x, y]), ...] where x is longitude and y is latitude
        date: date string 'YYYY-MM-DD'
        altitude: altitude in m
        location_name: name of the location
        plot_folder: folder where the json is written
        icerun_dates: dates when an ice run has cleared the river of ice
    Returns:
        Nothing, writes a json that is a list of objects structured like this:
        {
            "sub_div_id": ...,
            "x": ...,
            "y": ...,
            "date": ...,
            "total_ice": ...
        }
    """
data = []
year = int(date[0:4]) # get from date
fdate = str(year) + '-10-01'
ldate = str(year + 1) + '-05-31'
####################################################################################################################
# Assumed water temperatures at given air temperatures.
# One array for each month, with copies at the ends.
# The data are therefore December, January, February, ..., December, January: 14 elements
# (awt[0] is December, awt[1] is January, ..., awt[13] is January again).
# Note that May-November are dummy data. This is outside the ice season in Numedalslågen.
awt = [[[-20, 0], [-15, 0], [-10, 0], [-5, 0.5], [0, 1.0], [5, 1.5], [10, 2.0], [15, 2.2], [20, 2.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.1], [0, 0.1], [5, 0.2], [10, 0.2], [15, 0.3], [20, 0.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.1], [0, 0.2], [5, 0.3], [10, 0.3], [15, 0.4], [20, 0.5]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.3], [0, 0.8], [5, 1.6], [10, 2.6], [15, 2.8], [20, 3.0]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.4], [0, 1.9], [5, 3.3], [10, 4.5], [15, 5.0], [20, 5.5]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.8], [0, 2.7], [5, 6.0], [10, 7.2], [15, 8.3], [20, 10.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.8], [0, 2.7], [5, 6.0], [10, 7.2], [15, 8.3], [20, 10.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.8], [0, 2.7], [5, 6.0], [10, 7.2], [15, 8.3], [20, 10.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.8], [0, 2.7], [5, 6.0], [10, 7.2], [15, 8.3], [20, 10.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.8], [0, 2.7], [5, 6.0], [10, 7.2], [15, 8.3], [20, 10.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.7], [0, 1.9], [5, 3.3], [10, 4.5], [15, 5.0], [20, 5.5]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.6], [0, 0.8], [5, 1.6], [10, 2.6], [15, 2.8], [20, 3.0]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.5], [0, 1.0], [5, 1.5], [10, 2.0], [15, 2.2], [20, 2.4]], \
[[-20, 0], [-15, 0], [-10, 0], [-5, 0.1], [0, 0.1], [5, 0.2], [10, 0.2], [15, 0.3], [20, 0.4]]]
mf = 0.04
"""
awt = []
mf = []
"""
####################################################################################################################
# for every id/x/y, run the prognosis at position x, y
for i in coordinates_and_ids:
x = i[1][0]
y = i[1][1]
cords = utm.from_latlon(y, x, 33)
temp = pl.run_newsite(fdate, ldate, 10, 1, forcing='grid', location_name=location_name,
plot_folder=plot_folder, x=int(cords[0]), y=int(cords[1]), altitude=altitude,
icerun_dates=icerun_dates, make_plots=False, melt_factor=mf, air_water_temp=awt)
# TODO: decide how the plots from each point should be handled
data.append([i[0], x, y, date, get_ice_thickness_given_date(date, temp[8], temp[9])])
update_json(location_name, data, plot_folder)
return
if __name__ == "__main__":
location_name = 'Stjørdal'
se.plot_folder += 'Exemple\\'
# https://www.kartverket.no/til-lands/posisjon/transformere-koordinater-enkeltvis to find x and y
data = plot_lakes_simplified(316405, 7042107, 33, location_name, 2023, plot_folder=se.plot_folder)
#data = plot_lakes_simplified(266708, 6749365, 123, "Mjøsa", 2023, plot_folder=se.plot_folder)
mjos = utm.to_latlon(266708, 6749365, 33, northern=True)
print(f"Mjøsa kordinater : {mjos}" )
print(f"Mjøsa kordinater : {utm.from_latlon(mjos[0], mjos[1], 33)}" )
update_csv(location_name, data[8], data[9], data[10], data[11], se.plot_folder)
#update_json_timeseries(location_name, data[8], data[9], data[10], data[11], se.plot_folder)
print(f"Total ice thiccness for date: {data[8][57]} is {get_icethickness_date_from_csv(data[8][57], location_name, se.plot_folder)} or",
f"{get_ice_thickness_given_date(data[8][57], data[8], data[9])}")
temp = [(1, [10.709985478463118, 60.810991171403316]), (2, [10.709985478463118, 60.810991171403316])]
write_ice_info_for_points_based_on_date(temp, '2023-01-22', 123, 'Mjøsa')
pass
#! /usr/bin/python
# -*- coding: utf-8 -*-
"""Describe the universe! All constants for modelling."""
__author__ = 'Ragnar Ekker'
# Last update: 12.02.2024 aask
# Misc constants
sigma_pr_second = 5.6704*10**-8 # Stefan-Boltzmann constant per second [J/s/m2/K4]
temp_f = 0. # [degC] freezing temp for fresh water
absolute_zero = -273.15 # [degC] 0K is -273.15C
temp_rain_snow = 0.5 # [degC] Threshold where precipitation falls as snow or rain.
von_karmans_const = 0.41 # [-] von Karmans constant
solar_constant = 1.3608 *10**3 # [J/s/m2] https://en.wikipedia.org/wiki/Solar_constant
g = 9.81 # [m/s2] Acceleration of gravity
# Weather data if otherwise not given
avg_wind_const = 3.74 # [m/s] if nothing else is given. From TVEDALEN 2015
pressure_atm = 101.1 # [kPa] if nothing else is given.
rel_hum_air = 0.8349 # [-] if nothing else is given. From TVEDALEN 2015
laps_rate = 2./300 # [C/m] temperature in atmosphere decreases by 2C pr 300 meters elevation
clear_sky_temp_drop = 2. # How much lower the surface temperature at the lake is than the 2 m air temperature on land, when no clouds present
# Base properties of slush. The relation between ice and water influences the properties:
# k_slush_ice, rho_slush, c_slush, snow_to_slush_ratio
part_ice_in_slush = 0.70 # How big part of slush is frozen?
# Thermal Conductivities [W/m/K]
k_snow_max = 0.25 # from http://www.engineeringtoolbox.com/thermal-conductivity-d_429.html
k_new_snow = 0.05 # from http://www.engineeringtoolbox.com/thermal-conductivity-d_429.html
k_snow = 0.11 # from http://www.engineeringtoolbox.com/thermal-conductivity-d_429.html
k_drained_snow = k_snow #
k_black_ice = 2.24 # from Vehvilainen (2008) and again Saloranta (2000)
k_slush_ice = 0.5 * k_black_ice # from Vehvilainen (2008) and again Saloranta (2000)
k_water = 0.58 # from http://www.engineeringtoolbox.com/thermal-conductivity-d_429.html
# k_slush = 0.561 # From Vehvilainen (2008) and again Saloranta (2000)
# ---------- Slush conductivity -------------
# Slush is assumed to consist of part_ice_in_slush of ice and (1 - part_ice_in_slush) of water.
# Assume it as a layer of ice and a layer of water.
#
# The combined conductance (U) can be calculated from the combined serial conductivity (k) and
# the total layer height (h): U = k/h.
#
# For simplicity, let p = part_ice_in_slush. Serial conductances add as 1/U = 1/U1 + 1/U2, where U1 is the
# conductance of the ice part and U2 of the water part. This gives h/k = h1/k1 + h2/k2,
# where h = h1 + h2, h1 = h * p and h2 = h * (1 - p).
#
# This ends up with the equation: k = (k1*k2) / (k1 * (1 - p) + k2 * p) (= 0.84 for p = 0.64)
# --------------------------------------------
k_slush = (k_slush_ice * k_water) / (k_slush_ice * (1 - part_ice_in_slush) + k_water * part_ice_in_slush)
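# Sanity check with the values used here: p = 0.70, k_slush_ice = 0.5 * 2.24 = 1.12 and k_water = 0.58 give
# k_slush = (1.12 * 0.58) / (1.12 * 0.30 + 0.58 * 0.70) = 0.6496 / 0.742 ≈ 0.88 [W/m/K]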
# Densities [kg m^-3]
rho_snow_max = 450. # maximum snow density (kg m-3)
rho_new_snow = 250. # new-snow density (kg m-3)
rho_snow = 350. # average of max and min densities.
# rho_snow_max = 400. # maximum snow density (kg m-3)
# rho_new_snow = 200. # new-snow density (kg m-3)
# rho_snow = 300. # average of max and min densities.
rho_drained_snow = rho_snow #
rho_slush_ice = 0.875*10**3 # from Vehvilainen (2008) and again Saloranta (2000)
rho_black_ice = 0.917*10**3 # ice (incl. snow ice) density [kg m-3]
rho_water = 1000. # density of freshwater (kg m-3)
rho_air = 1.29 # kg/m3
# rho_slush = 0.920*10**3 # From Vehvilainen (2008) and again Saloranta (2000)
# ------------- Slush density ----------
# Slush is part ice and part water, with the ratio given by "part_ice_in_slush".
# Solving the equation below for rho_slush = 0.920*10**3 gives part_ice_in_slush = 0.64.
# --------------------------------------
rho_slush = part_ice_in_slush * rho_slush_ice + (1 - part_ice_in_slush) * rho_water
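# Worked example with the values above: rho_slush = 0.70 * 875 + 0.30 * 1000 = 912.5 [kg m-3]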
# Latent heat of fusion [J kg-1]
L_fusion = 333.5 * 10**3 # latent heat of fusion
L_vapour = 2260. * 10**3 # latent heat of vapour
L_0to100C = 418 * 10**3 # heat in warming 1kg water from 0 to 100 C
L_sublimation = L_fusion + L_vapour # Latent heat of sublimation. A solid going directly to vapor
# Molecular weights
molecular_weight_dry_air = 28.966 # [g/mol]
molecular_weight_water = 18.016 # [g/mol]
molecular_weight_ratio = 0.622 # [-] The ratio of the molecular weights of water and dry air
# Specific heat capacities [J/kg/K]
c_air = 1.01 * 10**3 # heat capacity of air
c_water = 4.19 * 10**3 # heat capacity of water
c_snow = 2.102 * 10**3 # heat capacity of snow from Dingman p. 189
c_ice = c_snow # at 0C. http://www.engineeringtoolbox.com/ice-thermal-properties-d_576.html
# Slush is assumed to consist of part_ice_in_slush of ice and (1 - part_ice_in_slush) of water.
# Think of it as a layer of ice and a layer of water.
c_slush = c_ice * part_ice_in_slush + c_water * (1 - part_ice_in_slush)
# Surface roughness as used to calculate turbulent fluxes.
# Surface roughness defined as height where wind field becomes zero
z_snow = 2*10**-3 # [m] surface roughness for snow
z_new_snow = z_snow # [m] surface roughness for new snow
z_drained_snow = z_snow # [m] surface roughness for drained snow
z_black_ice = 10**-4 # [m] surface roughness for black ice
z_slush_ice = z_black_ice # [m] surface roughness for slush ice
z_water = z_black_ice # [m] surface roughness for water
z_slush = z_water # [m] surface roughness for slush
# Albedo in values [0,1]
alfa_black_ice = 0.5 #
alfa_snow_new = 0.85 # from http://en.wikipedia.org/wiki/Albedo
alfa_snow_old = 0.45 #
alfa_slush_ice = 0.75 #
alfa_max = 0.95 #
alfa_bare_ground = 0.25 # Generic albedo for snow-free ground (Campbell, 1977)
# Emissivities are dimensionless
eps_snow = 0.97 #
eps_ice = eps_snow #
eps_water = eps_snow #
# The melting coefficients for the degree-day formula [m s-1 degC-1]
meltingcoeff_snow = -0.10 /(60*60*24) / 5 # 10cm melting pr day at 5degC
meltingcoeff_slush_ice = -0.04 /(60*60*24) / 5 # 4cm melting pr day at 5degC
meltingcoeff_slush = meltingcoeff_slush_ice*2 # slush is partly melted
meltingcoeff_black_ice = -0.02 /(60*60*24) / 5 # 2cm melting pr day at 5degC
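# Worked example: over one day at +5 degC the snow melt is meltingcoeff_snow * (60*60*24) * 5 = -0.10 m,
# i.e. the 10 cm per day stated above; black ice correspondingly melts 2 cm per day.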
# Conductance = thermal conductivity over height (k/h) [W/K/m2]
U_surface = k_snow/0.01 # surface conductance defined by the active part of column during 24hrs temperature oscillations
# Constants affecting how surface water interacts with snow on an ice cover
snow_pull_on_water = 0.02 # [m] The length water is pulled up into the snow by capillary forces
min_slush_change = 0.05 # [m] If slush level above water line is lower than this value, no new slush level is made
# snow to slush ratio depends on snow density and is calculated for every slush event in "update_slush_level"
# snow_to_slush_ratio = 0.33 # [-] When snow becomes slush when water is pulled up it also is compressed
# Adjustable conductivity to tweak performance in finding total conductance according to Ashton (1989)
h_min_for_conductivity_black_ice = 0.03 # The top layer is special. This defines how thick it is.
surface_k_reduction_black_ice = 0.25 # The top layer's conductivity is reduced by this factor
h_min_for_conductivity_slush_ice = 0.03
surface_k_reduction_slush_ice = 1. # no reduction factor before we test more
h_min_for_conductivity_snow = 0.03
surface_k_reduction_snow = 1. # no reduction factor before we test more
# -*- coding: utf-8 -*-
import datetime as dt
import math
from server.ModelFromNVE.icemodelling import constants as const, weatherelement as we
from server.ModelFromNVE.utilities import getwsklima as gws, doconversions as dc
import random as random
__author__ = 'ragnarekker'
# Last update 13.02.2024 aask
####### Methods for arrays
def delta_snow_from_total_snow(snowTotal):
""" Method takes a list of total snow depth and returns the daily change (derivative).
:param snowTotal: a list of floats representing the total snow coverage of a location
:return: a list of floats representing the net accumulation (only positive numbers) for the timeseries
"""
snowChange = []
snowChange.append(snowTotal[0])
for i in range(1, len(snowTotal), 1):
delta = (snowTotal[i]-snowTotal[i-1])
if delta > 0:
snowChange.append(delta) # the model uses only change where it accumulates
else:
snowChange.append(0)
return snowChange
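# Example: delta_snow_from_total_snow([0.10, 0.15, 0.12]) returns [0.10, 0.05, 0]
# (the initial depth, the accumulation, and 0 on the day the snow depth decreased).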
def delta_temperature_from_temperature(temp):
""" Method makes an array of temp change from yesterday to today (derivative).
:param temp: [list of floats] with the daily average temperature
:return: [list of floats] the change of temperature from yesterday to today
"""
dTemp = []
previousTemp = temp[0]
for t in temp:
dTemp.append(t - previousTemp)
previousTemp = t
return dTemp
def clouds_average_from_precipitation(prec):
"""Calculates a (constant) average cloud cover of a period based on the number of precipitation days in the period.
It turns out this gave rms = 0.37 on a Semsvann study.
:param prec: [list of float] Precipitation
:return clouds_out: [list of float] Cloud cover
"""
clouds_inn = clouds_from_precipitation(prec)
average = sum(clouds_inn)/float(len(clouds_inn))
average = float("{0:.2f}".format(average))
clouds_out = [average]*len(clouds_inn)
return clouds_out
####### Methods for single times steps
def albedo_from_net_short_wave(SW_inn, SW_out):
"""
SW_out = SW_inn * albedo
:param SW_inn: [float][kJ/m2]
:param SW_out: [float][kJ/m2]
:return albedo: [float][-]
"""
albedo = SW_out/SW_inn
return albedo
def surface_temp_from_long_wave(L_t, eps_surface=const.eps_snow, time_span_in_sec=24*60*60):
"""
Tss = (L_t/(eps*sigma))^1/4 - 273.15
:param L_t: [float] [kJ/m2/day] but other time period can be set.
:param eps_surface: Optional [float]
:param time_span_in_sec: Optional [int]
:return temp_surface: [degC]
"""
# Convert L_t [kJ/m2 per time span] to a flux [J/s/m2], invert the Stefan-Boltzmann law, Kelvin to degC.
temp_surface = (L_t * 1000 / time_span_in_sec / (const.sigma_pr_second * eps_surface))**(1/4) + const.absolute_zero
return temp_surface
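# Worked example of the unit handling above: L_t = 27000 kJ/m2/day corresponds to a flux of
# 27000 * 1000 / 86400 ≈ 312.5 J/s/m2, which with eps_surface = 0.97 gives
# (312.5 / (5.6704e-8 * 0.97))**(1/4) - 273.15 ≈ 1.5 degC.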
def temperature_from_temperature_and_clouds(temp, cloud_cover):
"""
This method takes shifts temperature according to cloud_cover. In theory clear nights will have a net
out going global radiation from the surface. Here it is assumed:
* a clear night adds to the energy balance with the equivalent of "const.clear_sky_temp_drop" degrees C
:param temp: temperature [float] from met-station or equal
:param cloud_cover: cloud_cover as number from 0 to 1 [float] on site where temperature i measured
:return temp: temperature [float] radiation-corrected based on snowevents
"""
temp = temp + const.clear_sky_temp_drop*(cloud_cover - 1)
return temp
def temperature_from_temperature_and_snow(temp, new_snow):
"""
This method is a special case of the temperature_from_temperature_and_clouds method.
It shifts temperature according to snow events. In theory clear nights will have a net
global radiation outgoing from the surface. Here it is assumed:
* a clear night adds to the energy balance with the equivalent of "const.clear_sky_temp_drop" degC
* a snow event has a cloud cover CC = 100%, and no new snow is assumed to be a clear night (CC = 0%)
:param temp: Temperature [float] from met-station or similar
:param new_snow: Snow change [float] on the site where temperature is measured
:return: Temperature [float] radiation-corrected based on snow events
"""
if new_snow == 0:
temp_temp = temp - const.clear_sky_temp_drop
else:
temp_temp = temp
return temp_temp
def rho_new_snow(temp):
"""
Method found in THS archives. Must investigate...
Note, there is a different formula used in the albedo_walter() method:
rho_snow = 50 + 3.4*(temp_atm + 15)
:param temp:
:return:
"""
new_snow_density = 0.05
temp = temp * 9.0/5.0 + 32.0 # Celsius to Fahrenheit
if(temp > 0.):
new_density = new_snow_density + (temp/100)**2
else:
new_density = new_snow_density
return new_density
def k_snow_from_rho_snow(rho_snow):
"""
The heat conductivity of the snow can be calculated from its density using the equation:
k_s = 2.85 * 10E-6 * ρ_s^2
where the minimum value of ρ_s is 100 kg/m3. This relation is found in [2008 Vehvilainen
ice model]. Note all constants are given in SI units.
[k] = W K-1 m-1
[ρ] = kg m-3
konstant = W m5 K-1 kg-2
:param rho_snow: density of snow
:return: estimated conductivity of snow
"""
rho_snow = max(100, rho_snow)
konstant = 2.85*10**-6
k_snow = konstant*rho_snow*rho_snow
return k_snow
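# Example: k_snow_from_rho_snow(350) = 2.85e-6 * 350**2 ≈ 0.35 [W/m/K]
# (somewhat higher than the constant k_snow = 0.11 used in constants.py).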
# untested
def irradiance_clear_sky(utm33_x, utm33_y, date_inn, time_span_in_sec=24*60*60):
"""Clear sky irradiance in J/m2/s * time_span_in_sec.
:param utm33_x, utm33_y: koordinat i UTM 33
:param date_inn: [datetime]
:param time_span_in_sec: [sec] Time resolution in sec
:return I_clear_sky: [kJ/m2] Energy over the time span we are looking at.
"""
day_no = date_inn.timetuple().tm_yday
# time given as the end of the time span
time_hour = (date_inn + dt.timedelta(seconds=time_span_in_sec)).hour
if time_hour == 0:
time_hour = 24
phi, thi, ddphi, ddthi = dc.lat_long_from_utm33(utm33_x,utm33_y, output= "both")
theta = 0.4092*math.cos((2*math.pi/365.25)*(day_no-173)) # solar declination angle (day angle), Liston 1995
theta2 = 2*math.pi/365.25*(day_no-80)
r = 149598000 # [km] distance from the sun
R = 6378 # [km] radius of earth
timezone = -4 * (math.fabs(thi) % 15) * thi/math.fabs(thi) # time offset [min] from the longitude (4 min per degree)
epsilon = 0.4092 # rad(23.45)
z_s = r*math.sin(theta2)*math.sin(epsilon)
r_p = math.sqrt(r**2-z_s**2)
nevner = (R-z_s*math.sin(phi))/(r_p*math.cos(phi))
if(nevner > -1) and (nevner < 1):
# acos() needs values between -1 and 1; if < -1 there is midnight sun, if > 1 polar night.
t0 = 1440/(2*math.pi)*math.acos((R-z_s*math.sin(phi))/(r_p*math.cos(phi)))
that = t0+5
n = 720-10*math.sin(4*math.pi*(day_no-80)/365.25)+8*math.sin(2*math.pi*day_no/365.25)
sr = (n-that+timezone)/60 # sunrise
ss = (n+that+timezone)/60 # sunset
if nevner <= -1: # Midnight sun
sr = 0.
ss = 24.
if nevner >= 1: # Polar night
sr = 12.
ss = 12.
dingom = {} # Values for zenith angle (0 straight up)
for tid in range(1, 24, 1):
if (tid > sr) and (tid < ss):
tom = tid-12 # Number of hours from solar noon. Solar noon is tom=0
cosarg = 0.2618 * tom # Radians pr hour
dingom[tid] = math.acos(math.sin(phi)*math.sin(theta)+math.cos(phi)*math.cos(theta)*math.cos(cosarg)) # Dingmans zenith angle
if (tid < sr) or (tid > ss): # If time is outside sunrise-sunset
dingom[tid] = math.pi/2 # Angle below horizon is set to be on the horizon.
if time_span_in_sec == 86400:
zenith_angle = dingom.values() # list
elif time_span_in_sec < 86400: # average transmissivity and solar angle for finer time resolution
interv = list(range(time_hour-int(time_span_in_sec/3600)+1, time_hour, 1)) # relevant hours
zenith_angle = [dingom[i] for i in interv]
else:
print("Method doesnt work on time intervals greater than 24hrs")
zenith_angle = None
# Mean values
zenith_angle = float( sum(zenith_angle) / len(zenith_angle) )
#Solar radiation
S0 = const.solar_constant #[J/m2/s] Solar constant pr sec
S0 *= time_span_in_sec # solar constant pr time step
S0 /=1000 # J to kJ
I_clear_sky = math.sin((math.pi/2)-zenith_angle) * S0
return I_clear_sky
####### Works on both
# untested
def clouds_from_short_wave(utm33_x, utm33_y, short_wave_inn, date_inn, time_span_in_sec=24*60*60):
"""Clouds defined by: clouds = 1 - measured solar irradiance / clear-sky irradiance
:param utm33_x: UTM 33 east coordinate
:param utm33_y: UTM 33 north coordinate
:param short_wave_inn: [float] in J/m2/time_step
:param date_inn:
:param time_span_in_sec: [int] Is necessary if used on non-list calculations and not on daily values.
:return cloud_cover_out:
"""
# If input isn't a list, make it so
input_is_list = False
if isinstance(short_wave_inn, list):
input_is_list = True
if input_is_list:
short_wave_list = short_wave_inn
date_list = date_inn
time_span_in_sec = (date_list[1] - date_list[0]).total_seconds()
else:
short_wave_list = [short_wave_inn]
date_list = [date_inn]
cloud_cover_list = []
for i in range(0, len(short_wave_list), 1):
I_clear_sky = irradiance_clear_sky(utm33_x, utm33_y, date_list[i], time_span_in_sec)
I_measured = short_wave_list[i]
cloud_cover = 1 - I_measured/I_clear_sky
cloud_cover_list.append(cloud_cover)
# If input wasn't a list, return only a float.
if input_is_list:
cloud_cover_out = cloud_cover_list
else:
cloud_cover_out = cloud_cover_list[0]
return cloud_cover_out
# untested
def atmospheric_emissivity(temp_atm_inn, cloud_cover_inn, method="2005 WALTER"):
"""Its a start.. Incomplete
:param temp_atm:
:param cloud_cover:
:return:
"""
# If inputs aren't lists, make them so
if not isinstance(temp_atm_inn, list):
temp_atm_list = [temp_atm_inn]
cloud_cover_list = [cloud_cover_inn]
else:
temp_atm_list = temp_atm_inn
cloud_cover_list = cloud_cover_inn
eps_atm_list = []
for i in range(0, len(temp_atm_list), 1):
temp_atm = temp_atm_list[i]
cloud_cover = cloud_cover_list[i]
if method=="1998 CAMPBELL":
# Atmospheric emissivity from Campbell and Norman, 1998. Emissivity is dimensionless.
eps_atm = (0.72+0.005*temp_atm)*(1-0.84*cloud_cover)+0.84*cloud_cover
elif method=="2005 WALTER":
# From THS and 2005 WALTER
eps_atm = (1.0+0.0025*temp_atm)-(1-cloud_cover)*(0.25*(1.0+0.0025*temp_atm))
elif method=="1963 SWINBANK":
# From 1998 TODD eq 13
eps_atm = cloud_cover + (1-cloud_cover) * (9.36 * 10**-6 * temp_atm**2)
elif method=="1969 Idso and Jackson":
# From 1998 TODD eq 14 (temp_atm in degC)
eps_atm = cloud_cover + (1-cloud_cover) * (1 - 0.261 * math.exp(-7.77 * 10**-4 * temp_atm**2))
else:
eps_atm = None
print('parameterization.py --> atmospheric_emissivity: Unknown method.')
eps_atm_list.append(eps_atm)
# Again, if inputs weren't lists, return only a float.
if not isinstance(temp_atm_inn, list):
eps_atm_out = eps_atm_list[0]
else:
eps_atm_out = eps_atm_list
return eps_atm_out
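# Quick check of the default "2005 WALTER" method at temp_atm = 0 degC:
# clear sky (cloud_cover = 0): eps_atm = 1.0 - 0.25 * 1.0 = 0.75
# overcast (cloud_cover = 1): eps_atm = 1.0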
def clouds_from_precipitation(prec_inn, method='Binary'):
"""Takes a list of precipitation and returns cloud cover.
Method = Binary: If precipitation the cloud cover is set to 100% and of no precipitation the cloud cover
is at a chosen lower threshold (now testing at 10%).
Method = Average: Calculates a (constant) average cloud cover of a period based on the number of
precipitation days in the period. It turns out this gave rms = 0.37 on a Semsvann study.
Method = Binary and average: Method combines the above. If there is no rain one day, the average precipitation
in the period is used and if there is precipitation 100% cloud cover is used.
Method = Random Thomas: Thomas Skaugen suggests choosing random numbers in an interval based on
the precipitation amount to estimate clouds. The method returns a different Nash-Sutcliffe every
run, but they seem to centre around the results from the "Binary and average" method.
:param prec_inn: [list or single value] Precipitation
:return clouds_out: [list or single value] Cloud cover
"""
# If prec_inn isn't a list, make it so.
if not isinstance(prec_inn, list):
prec = [prec_inn]
else:
prec = prec_inn
clouds_out = []
if method == 'Binary':
for p in prec:
if p == 0:
# clouds.append(0.)
clouds_out.append(0.1) # THS suggests a lower threshold
else:
clouds_out.append(1.)
if method == 'Average':
clouds = []
for p in prec:
if p == 0:
clouds.append(0.)
else:
clouds.append(1.)
average = sum(clouds)/float(len(clouds))
average = float("{0:.2f}".format(average))
clouds_out = [average]*len(clouds)
if method == 'Binary and average':
clouds_out_1 = clouds_from_precipitation(prec, method='Binary')
clouds_out_2 = clouds_from_precipitation(prec, method='Average')
for i in range(0, len(clouds_out_1), 1):
clouds_out.append(min(clouds_out_1[i]+clouds_out_2[i], 1))
if method == 'Random Thomas':
# Thomas Skaugen suggests choosing random numbers in an interval based on the precipitation
# amount to estimate clouds.
for p in prec:
if p > 5./1000:
clouds_out.append(1.)
elif 0. < p <= 5./1000:
    clouds_out.append(0.8 + random.random()/5.)  # random numbers between 0.8 and 1.0
else:  # p == 0.
    clouds_out.append(0.4 + random.random()*3./10.)  # random numbers between 0.4 and 0.7
# Again, if prec_inn isn't a list, return only a float.
if not isinstance(prec_inn, list):
clouds_out = clouds_out[0]
return clouds_out
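# Example: clouds_from_precipitation([0., 2./1000, 6./1000], method='Binary') returns [0.1, 1.0, 1.0],
# while method='Average' returns [0.67, 0.67, 0.67] for the same input.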
def __test_clouds_from_short_wave():
date_list = [dt.date.today() - dt.timedelta(days=x) for x in range(0, 365)]
date_list = [dt.datetime.combine(d, dt.datetime.min.time()) for d in date_list]
date_list.reverse()
# test Nordnesfjellet i Troms
station_id = 91500
short_wave_id = 'QSI'
long_wave_id = 'QLI'
temperature_id = 'TA'
timeseries_type = 2
utm_e = 711075
utm_n = 7727719
short_wave = gws.getMetData(station_id, short_wave_id, date_list[0], date_list[-1], timeseries_type)
long_wave = gws.getMetData(station_id, long_wave_id, date_list[0], date_list[-1], timeseries_type)
temperature = gws.getMetData(station_id, temperature_id, date_list[0], date_list[-1], timeseries_type)
short_wave = we.fix_data_quick(short_wave)
long_wave = we.fix_data_quick(long_wave)
temperature_daily = we.fix_data_quick(temperature)
short_wave_daily = we.make_daily_average(short_wave)
short_wave_daily = we.multiply_constant(short_wave_daily, 24 * 3600 / 1000) # Wh/m2 * 3600 s/h * kJ/1000J (energy) over 24hrs
long_wave_daily = we.make_daily_average(long_wave)
long_wave_daily = we.multiply_constant(long_wave_daily, 24 * 3600 / 1000)
temperature_daily = we.make_daily_average(temperature)
Short_wave_list = we.strip_metadata(short_wave_daily)
I_clear_sky_list = [irradiance_clear_sky(utm_e, utm_n, d) for d in date_list]
Cloud_cover = clouds_from_short_wave(utm_e, utm_n, Short_wave_list, date_list)
import matplotlib.pyplot as plt
plt.plot(date_list, Short_wave_list)
plt.plot(date_list, I_clear_sky_list)
plt.plot(date_list, Cloud_cover)
plt.ylim(0, 50000)
plt.show()
if __name__ == "__main__":
__test_clouds_from_short_wave()
a = 1