diff --git a/server/ModelFromNVE/output/plots/skumsjoen_sub_div.json "b/server/ModelFromNVE/output/plots/skumsj\303\270en_sub_div.json"
similarity index 100%
rename from server/ModelFromNVE/output/plots/skumsjoen_sub_div.json
rename to "server/ModelFromNVE/output/plots/skumsj\303\270en_sub_div.json"
diff --git a/server/__pycache__/consts.cpython-311.pyc b/server/__pycache__/consts.cpython-311.pyc
index bb38aa0189f45c62dc5b19e249a301babbd4ce76..eb4afa133d16daaf3ab0bd3a76b2116f6953d098 100644
Binary files a/server/__pycache__/consts.cpython-311.pyc and b/server/__pycache__/consts.cpython-311.pyc differ
diff --git a/server/__pycache__/scheduler.cpython-311.pyc b/server/__pycache__/scheduler.cpython-311.pyc
index 5dfa13e3a8023a5fbb246518a9a4001310b685d7..13179f2c3d337e9604693b9203e3b97a55f38fbb 100644
Binary files a/server/__pycache__/scheduler.cpython-311.pyc and b/server/__pycache__/scheduler.cpython-311.pyc differ
diff --git a/server/consts.py b/server/consts.py
index a667ac84a59424ea4130bb890349b34e7a37ff3d..ec6507e5138f3476b5128d66ad08ab5c93711a61 100644
--- a/server/consts.py
+++ b/server/consts.py
@@ -12,3 +12,4 @@ SSL_CERT_PATH = CERT_DIR + "testCert.crt"
 # File paths
 MAP_HANDLER_PATH = "server/map_handler/"
 LAKE_RELATIONS_PATH = MAP_HANDLER_PATH + "lake_relations/"
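+# Directory with per-lake "<lake>_sub_div.json" ice statistics produced by the NVE model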
+STATS_OUTPUT_PATH = "server/ModelFromNVE/output/plots/"
diff --git a/server/main.py b/server/main.py
index 0bbb4ff857d91e1128bf89b2fac348549f32d608..5971cecfb643499001d9acb4be758ac65eadd6d5 100644
--- a/server/main.py
+++ b/server/main.py
@@ -11,7 +11,7 @@ from server.scheduler import update_scheduler
 from server.consts import LAKE_RELATIONS_PATH
 from map_handler.get_lake_relation import get_map_data_handler
 from map_handler.input_new_data import input_new_Lidar_data
-from map_handler.update_measurements import update_measurements_handler, addTestData
+from map_handler.update_measurements import update_measurements_handler, add_test_data
 
 app = Flask(__name__)
 terminate_server = 0
@@ -124,7 +124,7 @@ class IceHTTP(BaseHTTPRequestHandler):
             lake_name_param = query_params.get('lake', [''])[0]
             lake_name = unquote(lake_name_param)  # Decode url param
 
-            addTestData(self, lake_name)
+            add_test_data(self, lake_name)
 
         elif self.path.startswith('/new_lidar_data'):
             parsed_path = urlparse(self.path)
diff --git a/server/map_handler/__pycache__/update_measurements.cpython-311.pyc b/server/map_handler/__pycache__/update_measurements.cpython-311.pyc
index 41296d85fecc3924b3c0adf40fb30f6899f92868..801383752939945e212637b04e598c1ae5883cd2 100644
Binary files a/server/map_handler/__pycache__/update_measurements.cpython-311.pyc and b/server/map_handler/__pycache__/update_measurements.cpython-311.pyc differ
diff --git a/server/map_handler/update_measurements.py b/server/map_handler/update_measurements.py
index 7c1e0bd9894e4dc47cfd4487698ef13f3fecef5e..cda26653e32aced76670a17bf5e663b588cd2007 100644
--- a/server/map_handler/update_measurements.py
+++ b/server/map_handler/update_measurements.py
@@ -3,8 +3,7 @@ import json
 import random
 from datetime import datetime
 
-from server.consts import LAKE_RELATIONS_PATH
-from server.ModelFromNVE.icemodellingscripts.getIceThicknessLakes import get_raw_dates, ice_prognosis_raw_data
+from server.consts import LAKE_RELATIONS_PATH, STATS_OUTPUT_PATH
 
 
 def update_measurements_handler(self, lake_name: str):
@@ -14,7 +13,7 @@ def update_measurements_handler(self, lake_name: str):
     self.send_header("Content-type", "application/json")
     self.end_headers()
 
-    self.wfile.write(measurement_data.encode('utf-8'))
+    # Error responses are returned as bytes, successful JSON responses as str, so encode only when needed
+    self.wfile.write(measurement_data if isinstance(measurement_data, bytes) else measurement_data.encode('utf-8'))
 
 
 def update_measurements(lake_name: str) -> (int, str):
@@ -31,18 +30,24 @@ def update_measurements(lake_name: str) -> (int, str):
             return 404, f"{lake_name} does not exist in the system"
 
         # Define file path to lidar data file
-        file_path = os.path.join(LAKE_RELATIONS_PATH, lake_name + '_lidar_data.json')
+        lidar_data_path = os.path.join(LAKE_RELATIONS_PATH, lake_name + '_lidar_data.json')
 
         # Lists to store processed data
         sub_div_ids = []
         measurements = []
 
+        # Read pre-computed ice stats for this lake from the NVE model output, if a stats file exists
+        stats_path = os.path.join(STATS_OUTPUT_PATH, lake_name + "_sub_div.json")
+        all_ice_stats = []
+        if os.path.exists(stats_path):
+            with open(stats_path, 'r') as file:
+                all_ice_stats = json.load(file)
+
         # Some lakes may not have any recent lidar data, so must check if the file exists
-        if os.path.exists(file_path):
+        if os.path.exists(lidar_data_path):
             # Read the newest lidar data from JSON file
-            with open(file_path, 'r') as file:
+            with open(lidar_data_path, 'r') as file:
                 lidar_data = json.load(file)
 
             # Iterate over all fetched rows
             for measurement in lidar_data:
                 processed_subdivs = []
@@ -68,19 +73,18 @@ def update_measurements(lake_name: str) -> (int, str):
                     center_lng = round(sub_division['CenLongitude'], 4)
                     avg_thickness = sub_division['AvgThickness']
 
-                    # Read from json file
-                    # lakName_sub_div.json
-                    # Add center coord txt file
-
-                    # Retrieve ice statistics for current subdivision
-                    ice_stats = get_raw_dates(ice_prognosis_raw_data(sub_div_id=subdiv_id, x=center_lat, y=center_lng))
+                    # Initialise list for the current ice stats
+                    ice_stats = []
 
+                    print("Fails here?")
                     # Ice statistics were retrieved successfully
-                    if len(ice_stats) > 0 and len(ice_stats[0]) > 0:
+                    if len(all_ice_stats) >= subdiv_id is not None or all_ice_stats[subdiv_id] != "Null":
+                        ice_stats = all_ice_stats[subdiv_id]
                         accuracy = 3
 
+                        print("Fails here, later?")
                         # Increase accuracy by 1 if the LiDar data and NVE data have a minimal discrepancy
-                        if abs(avg_thickness - ice_stats[0]['Total ice (m)']) < 1.0:
+                        if abs(avg_thickness - all_ice_stats[subdiv_id][3]['Black ice (m)']) < 1.0:
                             accuracy = 4
                     else:  # Failed to retrieve ice statistics, initialise empty ice stats object
                         ice_stats = {
@@ -117,7 +121,7 @@ def update_measurements(lake_name: str) -> (int, str):
                 measurements.append(new_measurement)
 
         # Populate remaining non-processed subdivisions and create "invalid" or "proxy" measurement to store them
-        remaining_sub_divs = fill_remaining_subdivisions(lake_name, sub_div_ids)
+        remaining_sub_divs = fill_remaining_subdivisions(lake_name, sub_div_ids, all_ice_stats)
         proxy = {
             'MeasurementID': -1,
             'TimeMeasured': str(datetime.now()),
@@ -135,14 +139,16 @@ def update_measurements(lake_name: str) -> (int, str):
 
         # Convert list of dictionaries to JSON
         response_data = json.dumps(measurements, indent=4)
-        # Set headers
+
+        # Return data
         return 200, response_data
 
     except Exception as e:
+        print(f"Error in updating measurements: {e}")
         return 500, f"Error in updating measurements: {e}".encode('utf-8')
 
 
-def fill_remaining_subdivisions(lake_name: str, processed_ids: list):
+def fill_remaining_subdivisions(lake_name: str, processed_ids: list, all_ice_stats: list):
     """
     Returns a list of subdivision dictionaries for subdivisions without measurements.
 
@@ -170,10 +176,11 @@ def fill_remaining_subdivisions(lake_name: str, processed_ids: list):
                 center_lng = round(sub_div['properties']['sub_div_center'][1], 4)
 
-                # Fetch weather data for each subdivision from the NVE model
-                ice_stats = get_raw_dates(ice_prognosis_raw_data(sub_div_id=sub_div_id, x=center_lat, y=center_lng))
+                # Look up pre-computed ice statistics for this subdivision from the NVE model output
+                ice_stats = []
 
-                if len(ice_stats) > 0 and len(ice_stats[0]) > 0:
-                    total_ice_thickness = ice_stats[0]['Total ice (m)']
+                if len(all_ice_stats) > sub_div_id and all_ice_stats[sub_div_id] != "Null":
+                    ice_stats = all_ice_stats[sub_div_id]
+                    total_ice_thickness = ice_stats[0]['Black ice (m)']
                     accuracy = 1
                 else:  # Initialise empty ice stats
                     ice_stats = {
@@ -199,9 +206,7 @@ def fill_remaining_subdivisions(lake_name: str, processed_ids: list):
                     'CenLatitude': center_lat,
                     'CenLongitude': center_lng,
                     'Accuracy': accuracy,
-                    # Calculate ice thickness based on total ice, temporary
-                    # 'Color': calculateColor(ice_stats[0]['Total ice (m)']),
-                    'Color': calculateColor(ice_stats[0]['Black ice (m)']),
+                    'Color': calculateColor(total_ice_thickness),
                     'IceStats': ice_stats,
                 }
                 sub_divisions.append(sub_division)
@@ -227,7 +232,7 @@ def calculateColor(thickness: float):
         return 0  # Grey
 
 
-def addTestData(self, lake_name: str):
+def add_test_data(self, lake_name: str):
     """
     Adds random test data to lake_name_lidar_data.json. This function is purely for testing, not production.
     The function overwrites the lidar data for the selected lake.
diff --git a/server/scheduler.py b/server/scheduler.py
index 91d98279a9e3617411ce631f493cf0f377d23896..4e57e3cd6e8c4921112c745d3d6c948e03f2ad57 100644
--- a/server/scheduler.py
+++ b/server/scheduler.py
@@ -14,7 +14,7 @@ def update_all_measurements(lake_names: list):
         from_date = "2024-01-10"
         to_date = "2024-01-20"
 
-        update_data(from_date, to_date, lake_name=lake_names,
+        update_data(from_date, to_date, lake_name=lake_names,  # NB: make this call every day with False, every 4th day with True
                     sub_divs_folder = LAKE_RELATIONS_PATH + lake_names + '_centers.txt', update_all_bboxes=True)
 
         update_measurements_handler(None, lake)