diff --git a/app/lib/consts.dart b/app/lib/consts.dart
index 97d7ba83ad6bd2efd6fa68549901ce6afce136d0..13b7ca3b3bbebe953f24e62f53e93072154af512 100644
--- a/app/lib/consts.dart
+++ b/app/lib/consts.dart
@@ -17,6 +17,8 @@ List<Measurement> selectedMarkerList = [];
 LatLng mapCenter = LatLng(60.8000, 10.8471);
 DateTime ?lastUpdate; // Last time data was fetched from server
 
+List<String> lakeSearchOptions = []; // Lake-name search corpus; empty until fetched from the server
+
 // Font settings
 const textColor = Colors.white;
 final appTitleStyle = GoogleFonts.chakraPetch(
diff --git a/app/lib/utils/custom_search_delegate.dart b/app/lib/utils/custom_search_delegate.dart
index 19366184f55fbb035f932a5758b290654e92925c..13c62cf94ff6ad779d4d2dfa612fb4af82a64711 100644
--- a/app/lib/utils/custom_search_delegate.dart
+++ b/app/lib/utils/custom_search_delegate.dart
@@ -1,6 +1,8 @@
 import 'package:fuzzy/fuzzy.dart';
 import 'package:flutter/material.dart';
 
+import '../consts.dart';
+
 typedef SearchResultCallback = void Function(String result);
 
 class CustomSearchDelegate extends SearchDelegate {
@@ -8,17 +10,6 @@ class CustomSearchDelegate extends SearchDelegate {
 
   CustomSearchDelegate(this.onResultSelected);
 
-  List<String> searchItems = [ // NB temp values
-    "Mjøsa",
-    "Bogstadsvannet",
-    "Einavatnet",
-    "Femsjøen",
-    "Femunden",
-    "Fjellsjøen",
-    "Gjende",
-    "Gjersjøen"
-  ];
-
   @override
   List<Widget> buildActions(BuildContext context) {
     return [
@@ -45,7 +36,7 @@ class CustomSearchDelegate extends SearchDelegate {
   Widget buildResults(BuildContext context) {
     List<String> searchResults = [];
     final options = FuzzyOptions(threshold: 0.4, findAllMatches: true);
-    final matcher = Fuzzy(searchItems, options: options);
+    final matcher = Fuzzy(lakeSearchOptions, options: options);
     final results = matcher.search(query);
     searchResults = results.map((result) => result.item as String).toList();
 
@@ -70,7 +61,7 @@ class CustomSearchDelegate extends SearchDelegate {
   Widget buildSuggestions(BuildContext context) {
     List<String> searchResults = [];
     final options = FuzzyOptions(threshold: 0.4, findAllMatches: true);
-    final matcher = Fuzzy(searchItems, options: options);
+    final matcher = Fuzzy(lakeSearchOptions, options: options);
     final results = matcher.search(query);
     searchResults = results.map((result) => result.item as String).toList();
 
diff --git a/server/main.py b/server/main.py
index f730b9e9429934e288819052f658087988aaccea..820e58ac554b42d2174796d2ebde77c3f201b4ff 100644
--- a/server/main.py
+++ b/server/main.py
@@ -7,6 +7,7 @@ from map_handler.process_lake import fetch_divided_map
 from map_handler.input_new_data import input_new_Lidar_data
 from urllib.parse import urlparse, parse_qs
 import ssl
+import json
 import sqlite3
 
 app = Flask(__name__)
@@ -38,7 +39,17 @@ class IceHTTP(BaseHTTPRequestHandler):
             self.end_headers()
 
             self.wfile.write(b"Root path hit!")
+        elif self.path == '/get_lake_names':
+            with open('server/map_handler/lake_relations/all_lake_names.json', 'r') as file:
+                lake_names = json.load(file)
 
+            json_data = json.dumps(lake_names, ensure_ascii=False)
+
+            self.send_response(200)
+            self.send_header('Content-type', 'application/json')
+            self.end_headers()
+
+            self.wfile.write(json_data.encode('utf-8'))
         elif self.path.startswith('/update_map'):  # NB: should be POST?
             parsed_path = urlparse(self.path)
             query_params = parse_qs(parsed_path.query)
diff --git a/server/map_handler/__pycache__/process_lake.cpython-311.pyc b/server/map_handler/__pycache__/process_lake.cpython-311.pyc
index 5a0aa0d8078879c49ca64ebba1a48b02f6626337..8789c1778de26f2fba48817f2cc609251a53097c 100644
Binary files a/server/map_handler/__pycache__/process_lake.cpython-311.pyc and b/server/map_handler/__pycache__/process_lake.cpython-311.pyc differ
diff --git a/server/map_handler/lake_relations/added_lakes.txt b/server/map_handler/lake_relations/added_lakes.txt
deleted file mode 100644
index b10ac55a05d6891518a78a773134b3e89e79779b..0000000000000000000000000000000000000000
--- a/server/map_handler/lake_relations/added_lakes.txt
+++ /dev/null
@@ -1 +0,0 @@
-mjosa
\ No newline at end of file
diff --git a/server/map_handler/lake_relations/all_lake_names.json b/server/map_handler/lake_relations/all_lake_names.json
new file mode 100644
index 0000000000000000000000000000000000000000..33022b5cc38956cd75dd7b3e0de207660688ea3c
--- /dev/null
+++ b/server/map_handler/lake_relations/all_lake_names.json
@@ -0,0 +1,10 @@
+[
+  "Mjøsa",
+  "Bogstadsvannet",
+  "Einavatnet",
+  "Femsjøen",
+  "Femunden",
+  "Fjellsjøen",
+  "Gjende",
+  "Gjersjøen"
+]
diff --git a/server/map_handler/lake_relations/all_system_relations.txt b/server/map_handler/lake_relations/all_system_relations.txt
deleted file mode 100644
index 6a20bd4ab252d2b49eb5b1c23bc9aff62a24b9f3..0000000000000000000000000000000000000000
--- a/server/map_handler/lake_relations/all_system_relations.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Mjøsa - mjosa
-TestLake - testlake
\ No newline at end of file
diff --git a/server/map_handler/process_lake.py b/server/map_handler/process_lake.py
index dd0d202b30990c2ec7019fa8bf2d311df793bb2a..bf14018af244d9ff66bfdfdf21a04e9348814b8b 100644
--- a/server/map_handler/process_lake.py
+++ b/server/map_handler/process_lake.py
@@ -34,79 +34,4 @@ def get_ids_and_centers(file_name):  # NB buggy
             'sub_div_center': sub_div_center
         }
         subdivisions.append(subdivision)
-    return subdivisions
-
-# Create groups creates polygons which consist of groupings of related subdivisions
-def create_groups(relation_file: str, data: list):
-    try:
-        print("Creating groups...")
-
-        # Read lake relation from json file
-        geo_data = gpd.read_file("server/lake_relations/" + relation_file + "_div.json")
-        relation_data = geo_data[geo_data['geometry'].geom_type == 'Polygon']
-
-        # Loop through each measurement and create groupings of subdivisions
-        for measurement in data:
-            subdiv_list = []
-
-            for subdivision in measurement['Subdivisions']:
-                subDivID = str(subdivision['SubdivID'])  # Convert to string to match format in feature
-                group_id = subdivision['GroupID']  # Extract group ID
-
-                # Find the matching subdivision in relation_data
-                for index, feature in relation_data.iterrows():
-                    # Add the new group ID to the correct subdivision
-                    if feature['sub_div_id'] == subDivID:
-                        subdiv_list.append((group_id, Polygon(feature['geometry'].exterior.coords)))
-                        # subdiv_list.append((group_id, Polygon([feature['geometry']])))
-
-            # Sort subdiv_list based on group_ids
-            sorted_list = sorted(subdiv_list, key=lambda x: x[0])
-
-            current_group = -1  # Current group_id
-            new_shape = []      # List of subdivision geometries for current group
-
-            # Merge subdivisions in a given group
-            for element in sorted_list:
-                # If the subdivision still belongs to the current group
-                if element[0] == current_group:
-                    new_shape.append(element[1])
-
-                # New group id is found
-                elif len(new_shape) > 1:
-                    # Merger all subdivisions for previous group into a single shape
-                    merged_polygon = MultiPolygon(new_shape).buffer(0)
-
-                    # Convert to Polygon
-                    if isinstance(merged_polygon, MultiPolygon):
-                        merged_polygon = merged_polygon.convex_hull
-
-                    # Structure the new polygon
-                    merged_polygon_structure = {
-                        "type": "Feature",
-                        "properties": {
-                            "group_id": current_group,
-                        },
-                        "geometry": {
-                            "type": "Polygon",
-                            "coordinates": [list(merged_polygon.exterior.coords)]
-                        }
-                    }
-
-                    # Append new polygon to relation data
-                    relation_data = relation_data.append(merged_polygon_structure, ignore_index=True)
-
-                    # Update current group to new group_id and reset new_shape for next group
-                    current_group = element[0]
-                    new_shape = [element[1]]
-
-        # Convert GeoDataFrame to JSON
-        relation_data_json = json.loads(relation_data.to_json())
-
-        # Write relation with group shapes to file
-        write_json_to_file("server/lake_relations", "mjosa", relation_data_json)
-
-    except Exception as e:
-        print(f"Error in create_groups(): {e}")
-
-
+    return subdivisions