diff --git a/app/api/dexi.py b/app/api/dexi.py
index 99b6240bd3578f30c22f90997722915e8c1956ee..b74b0f3453a75f8f55e31343c1ed0d342b923506 100644
--- a/app/api/dexi.py
+++ b/app/api/dexi.py
@@ -8,6 +8,8 @@ import csv
 import os
 import logging
 import random
+
+
 import pandas as pd
 import types
 from typing import List, Dict
@@ -43,13 +45,16 @@ def dexi_eval_json(city_id=None):
         return {"message": "Error on frontend - city_id not specified"}
 
     baseline_id = request.get_json()["baseline"]
-    compare_id = request.get_json()["compare"]
+    compare_id = request.get_json()["compare"]
     compare_simulation_name = get_compare_simulation_name(compare_id)
     baseline_json, compare_json, _ = preprocess_kpi_jsons(baseline_id, compare_id)
 
     # running dexi here normally
     dexi_output, simulation_dirs = prepare_and_run_dexi(compare_json, city_id, compare_simulation_name,
                                                         baseline_id, compare_id, "temp")
+
+    dexi_output = postprocess_dexi_result(dexi_output, baseline_json)
+    app.logger.debug("postprocessed \n%s", json.dumps(dexi_output, indent=2))
     write_results(dexi_output, baseline_id, simulation_dirs[0])
 
     return {"success": True}
@@ -237,16 +242,52 @@ def preprocess_kpi_jsons(baseline_id, compare_id):
     Read the kpi data, calculate relative to baseline, discretize for dexi.
     """
     baseline_json = read_kpi_jsons([baseline_id])[0]
+    structure_json = json.loads(json.dumps(baseline_json))  # NOTE(review): unused deep copy — remove or wire into postprocessing?
     compare_json = read_kpi_jsons([compare_id])[0]
+    app.logger.debug("read kpis")
+    app.logger.debug("baseline kpis: \n%s", json.dumps(baseline_json, indent=2))
+    app.logger.debug(" compare kpis: \n%s", json.dumps(compare_json, indent=2))
 
     simulation_ids = get_simulation_ids([baseline_json, compare_json])
-
     baseline_json, compare_json = calculate_relative_kpi(baseline_json, compare_json)
+    app.logger.debug("relativitzed")
+    app.logger.debug("baseline kpis: \n%s", json.dumps(baseline_json, indent=2))
+    app.logger.debug(" compare kpis: \n%s", json.dumps(compare_json, indent=2))
+
     baseline_json, compare_json = discretize_relative_kpi(baseline_json, compare_json)
+    app.logger.debug("discretized")
+    app.logger.debug("baseline kpis: \n%s", json.dumps(baseline_json, indent=2))
+    app.logger.debug(" compare kpis: \n%s", json.dumps(compare_json, indent=2))
 
     return baseline_json, compare_json, simulation_ids
 
 
+def postprocess_dexi_result(evaluated, structure, city_id=None):
+    to_change = {
+        "amsterdam": ["Mobility Policy Quality", "Local", "Local traffic", "Local bike infrastructure", "City-wide", "City-wide traffic", "City-wide bike infrastructure"],
+        "bilbao": ["Mobility Policy Quality", "Local", "Local Pollution", "Local Traffic", "City-wide", "City-wide Pollution", "City-wide Traffic"],
+        "helsinki": ["Mobility Policy Quality", "Local", "Local Pollution", "City-wide", "City-wide Pollution"],
+        "messina": []
+    }
+    # NOTE(review): city_id was previously an undefined name here (NameError at runtime);
+    city_id = city_id or app.dotenv.get("URBANITE_CITY")  # fall back to the configured city
+    app.logger.debug("evaluated: %s", evaluated)
+    app.logger.debug("structure: %s", structure)
+    result = {}
+    # top key is always simulation name
+    top_level_key = list(evaluated.keys())[0]
+    evaluated = evaluated[top_level_key]
+    for key, value in evaluated.items():
+        app.logger.debug("key in evaluated: '%s': %s, %s, %s", key, value, type(value), key in evaluated)
+        if key in to_change.get(city_id, []):
+            if value == "2":
+                value = "3"
+            elif value == "3":
+                value = "5"
+        result[key] = value
+        app.logger.debug("adds to result: %s", result[key])
+    return {top_level_key: result}
+
 def read_kpi_jsons(sim_ids):
     """
     Returns json object of kpi files for all simulations provided by ids.
@@ -284,14 +325,14 @@ def calculate_relative_kpi(baseline_json: Dict, compare_json: Dict):
         else:
             try:
                 compare_result[b_k] = (
-                    0.0
+                    1.0
                     if compare_json[b_k] == 0.0
                     else float(b_v) / float(compare_json[b_k])
                 )
             except RuntimeError as e:
                 app.logger.error("Error occured during relativizing KPIs.\n%s", e)
                 return
-            baseline_result[b_k] = 1
+            baseline_result[b_k] = 1.0
     return baseline_result, compare_result
 
 
@@ -440,56 +481,56 @@ def create_dexi_input_amsterdam(json_inputs, compare_sim_name):
         + [
             json_inputs[i]["amsterdam"]["local"]
             .get("traffic", {})
-            .get("bikeIntensity", 2)
+            .get("bikeIntensity", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local bike congestion"]
         + [
             json_inputs[i]["amsterdam"]["local"]
             .get("traffic", {})
-            .get("bikeCongestion", 2)
+            .get("bikeCongestion", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local bike safety"]
         + [
             json_inputs[i]["amsterdam"]["local"]
             .get("bikeInfrastructure", {})
-            .get("bikeSafety", 2)
+            .get("bikeSafety", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local bikeability"]
         + [
             json_inputs[i]["amsterdam"]["local"]
             .get("bikeInfrastructure", {})
-            .get("bikeability", 2)
+            .get("bikeability", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide bike intensity"]
         + [
             json_inputs[i]["amsterdam"]["cityWide"]
             .get("traffic", {})
-            .get("bikeIntensity", 2)
+            .get("bikeIntensity", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide bike congestion"]
         + [
             json_inputs[i]["amsterdam"]["cityWide"]
             .get("traffic", {})
-            .get("bikeCongestion", 2)
+            .get("bikeCongestion", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide bike safety"]
         + [
             json_inputs[i]["amsterdam"]["cityWide"]
             .get("bikeInfrastructure", {})
-            .get("bikeSafety", 2)
+            .get("bikeSafety", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide bikeability"]
         + [
             json_inputs[i]["amsterdam"]["cityWide"]
             .get("bikeInfrastructure", {})
-            .get("bikeability", 2)
+            .get("bikeability", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
     ]
@@ -506,7 +547,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
             json_inputs[i]["bilbao"]
             .get("local", {})
             .get("pollution", {})
-            .get("NOx", 2)
+            .get("NOx", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local PM10"]
@@ -514,7 +555,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
             json_inputs[i]["bilbao"]
             .get("local", {})
             .get("pollution", {})
-            .get("PM")
+            .get("PM", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local CO2"]
@@ -522,7 +563,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
             json_inputs[i]["bilbao"]
             .get("local", {})
             .get("pollution", {})
-            .get("CO2_TOTAL", 2)
+            .get("CO2_TOTAL", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local Acoustic pollution"]
@@ -530,7 +571,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
             json_inputs[i]["bilbao"]
             .get("local", {})
             .get("pollution", {})
-            .get("accousticPollution", 2)
+            .get("accousticPollution", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local Pedestrian travel time"]
@@ -538,7 +579,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
             json_inputs[i]["bilbao"]
             .get("local")
             .get("traffic", {})
-            .get("pedestrianTravelTime", 2)
+            .get("pedestrianTravelTime", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local Daily internal bike travels"]
@@ -546,7 +587,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
             json_inputs[i]["bilbao"]
             .get("local")
             .get("traffic", {})
-            .get("dailyInternalBikeTravels", 2)
+            .get("dailyInternalBikeTravels", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide NOx"]
@@ -554,7 +595,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
             json_inputs[i]["bilbao"]
             .get("cityWide", {})
             .get("pollution", {})
-            .get("NOx", 2)
+            .get("NOx", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide PM10"]
@@ -562,7 +603,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
             json_inputs[i]["bilbao"]
             .get("cityWide", {})
             .get("pollution", {})
-            .get("PM", 2)
+            .get("PM", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide CO2"]
@@ -570,7 +611,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
             json_inputs[i]["bilbao"]
             .get("cityWide", {})
             .get("pollution", {})
-            .get("CO2_TOTAL", 2)
+            .get("CO2_TOTAL", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide Acoustic pollution"]
@@ -578,7 +619,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
             json_inputs[i]["bilbao"]
             .get("cityWide", {})
             .get("pollution", {})
-            .get("pollution", 2)
+            .get("pollution", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide Pedestrian travel time"]
@@ -586,7 +627,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
             json_inputs[i]["bilbao"]
             .get("cityWide", {})
             .get("traffic", {})
-            .get("pedestrianTravelTime", 2)
+            .get("pedestrianTravelTime", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide Daily internal bike travels"]
@@ -594,12 +635,12 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
             json_inputs[i]["bilbao"]
             .get("cityWide", {})
             .get("traffic", {})
-            .get("dailyInternalBikeTravels", 2)
+            .get("dailyInternalBikeTravels", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Entry capacity to center"]
         + [
-            json_inputs[i]["bilbao"].get("entryCapacityToCenter", 2)
+            json_inputs[i]["bilbao"].get("entryCapacityToCenter", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
     ]
@@ -615,61 +656,61 @@ def create_dexi_input_helsinki(json_inputs, compare_sim_name):
         [""] + compare_sim_name,
         ["Local NOx"]
         + [
-            json_inputs[i]["helsinki"]["local"].get("pollution", {}).get("NOx", 2)
+            json_inputs[i]["helsinki"]["local"].get("pollution", {}).get("NOx", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local PM10"]
         + [
-            json_inputs[i]["helsinki"]["local"].get("pollution", {}).get("PM", 2)
+            json_inputs[i]["helsinki"]["local"].get("pollution", {}).get("PM", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local CO2"]
         + [
-            json_inputs[i]["helsinki"]["local"].get("pollution", {}).get("CO2_TOTAL", 2)
+            json_inputs[i]["helsinki"]["local"].get("pollution", {}).get("CO2_TOTAL", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local Acoustic pollution"]
         + [
             json_inputs[i]["helsinki"]["local"]
             .get("pollution", {})
-            .get("acousticPollution", 2)
+            .get("acousticPollution", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local congestions and bottlenecks"]
         + [
-            json_inputs[i]["helsinki"]["local"].get("congestionsAndBottlenecks", 2)
+            json_inputs[i]["helsinki"]["local"].get("congestionsAndBottlenecks", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide NOx"]
         + [
-            json_inputs[i]["helsinki"]["cityWide"].get("pollution", {}).get("NOx", 2)
+            json_inputs[i]["helsinki"]["cityWide"].get("pollution", {}).get("NOx", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide PM10"]
         + [
-            json_inputs[i]["helsinki"]["cityWide"].get("pollution", {}).get("PM", 2)
+            json_inputs[i]["helsinki"]["cityWide"].get("pollution", {}).get("PM", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide CO2"]
         + [
-            json_inputs[i]["helsinki"]["cityWide"].get("pollution", {}).get("CO2_TOTAL", 2)
+            json_inputs[i]["helsinki"]["cityWide"].get("pollution", {}).get("CO2_TOTAL", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide Acoustic pollution"]
         + [
             json_inputs[i]["helsinki"]["cityWide"]
             .get("pollution", {})
-            .get("acousticPollution", 2)
+            .get("acousticPollution", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide congestions and bottlenecks"]
         + [
-            json_inputs[i]["helsinki"]["cityWide"].get("congestionsAndBottlenecks", 2)
+            json_inputs[i]["helsinki"]["cityWide"].get("congestionsAndBottlenecks", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Harbour area traffic flow"]
         + [
-            json_inputs[i].get("harbourAreaTrafficFlow", 2)
+            json_inputs[i].get("harbourAreaTrafficFlow", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
     ]
@@ -685,84 +726,84 @@ def create_dexi_input_messina(json_inputs, compare_sim_name):
         + [
             json_inputs[i]["messina"]["local"]
             .get("publicTransport", {})
-            .get("publicTransportUse", 2)
+            .get("publicTransportUse", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local average speed of public transport"]
         + [
             json_inputs[i]["messina"]["local"]
             .get("publicTransport", {})
-            .get("averageSpeedOfPublicTransport", 2)
+            .get("averageSpeedOfPublicTransport", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local number of bike trips"]
         + [
             json_inputs[i]["messina"]["local"]
             .get("publicTransport", {})
-            .get("numberOfBikeTrips", 2)
+            .get("numberOfBikeTrips", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local share of public transport"]
         + [
             json_inputs[i]["messina"]["local"]
             .get("shareOfTrips", {})
-            .get("shareOfPublicTransport", 2)
+            .get("shareOfPublicTransport", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local share of car trips"]
         + [
             json_inputs[i]["messina"]["local"]
             .get("shareOfTrips", {})
-            .get("shareOfCarTrips", 2)
+            .get("shareOfCarTrips", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["Local share of bicycles"]
         + [
             json_inputs[i]["messina"]["local"]
             .get("shareOfTrips", {})
-            .get("shareOfBicycles", 2)
+            .get("shareOfBicycles", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide public transport use"]
         + [
             json_inputs[i]["messina"]["cityWide"]
             .get("publicTransport", {})
-            .get("publicTransportUse", 2)
+            .get("publicTransportUse", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide average speed of public transport"]
         + [
             json_inputs[i]["messina"]["cityWide"]
             .get("publicTransport", {})
-            .get("averageSpeedOfPublicTransport", 2)
+            .get("averageSpeedOfPublicTransport", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide number of bike trips"]
         + [
             json_inputs[i]["messina"]["cityWide"]
             .get("publicTransport", {})
-            .get("numberOfBikeTrips", 2)
+            .get("numberOfBikeTrips", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide share of public transport"]
         + [
             json_inputs[i]["messina"]["cityWide"]
             .get("shareOfTrips", {})
-            .get("shareOfPublicTransport", 2)
+            .get("shareOfPublicTransport", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide share of car trips"]
         + [
             json_inputs[i]["messina"]["cityWide"]
             .get("shareOfTrips", {})
-            .get("shareOfCarTrips", 2)
+            .get("shareOfCarTrips", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
         ["City-wide share of bicycles"]
         + [
             json_inputs[i]["messina"]["cityWide"]
             .get("shareOfTrips", {})
-            .get("shareOfBicycles", 2)
+            .get("shareOfBicycles", 3)
             for i, _ in enumerate(compare_sim_name)
         ],
     ]
diff --git a/app/api/geojsons.py b/app/api/geojsons.py
index 20695c41759acc64afc1c1956a3e30fd70c854f2..bb6ed75c5a91ea3c97ba06403f1ce0f6ea88badd 100644
--- a/app/api/geojsons.py
+++ b/app/api/geojsons.py
@@ -5,6 +5,7 @@ import os
 import json
 import threading
 import logging
+import time
 from typing import Dict, List, Union
 
 from pyproj import Transformer
@@ -25,6 +26,28 @@ city = app.dotenv["URBANITE_CITY"]
 data_dir = f"{app.dotenv['DATA_DIR']}/{app.dotenv['URBANITE_CITY']}"
 
 
+# DEBUG
+@app.route("/dss/geojson/test/<sim_id>")
+def geojsontest(sim_id):
+    app.logger.debug("starting test")
+    kpi_name = "pedestrianTravelTime"
+    app.logger.debug("starting load network and events")
+    network, events = utils.get_network_and_events(sim_id)
+    app.logger.debug("done")
+    app.logger.debug("starting calculate pedestrian travel time")
+    start_t = time.time()
+    pedestrianTravelTime = events.pedestrian_travel_time()["detailed_long_trips"]
+    delta_t = time.time() - start_t
+    app.logger.debug("done in %s s", delta_t)
+    app.logger.debug("starting write geojson pedestrian trips")
+    start_t = time.time()
+    pedestrianTravelTime = write_geojson_pedestrian_trips(pedestrianTravelTime, network, sim_id)
+    delta_t = time.time() - start_t
+    app.logger.info("written pedestrianTravelTime in %s s", delta_t)
+    return pedestrianTravelTime
+
+
+
 @app.route("/dss/geojson/<int:sim_id>/<kpi_name>")
 def get_kpi_geojson(sim_id, kpi_name):
     sim_dir = utils.get_sim_dir_names([sim_id])[0]
@@ -118,29 +141,30 @@ def get_kpi_data_and_write(network, events, sim_id):
 
     results = {}
     emisson_object = kpis_module.get_emissions(sim_id, network)
+    rush_hour = utils.get_max_traffic_time(sim_id)
 
     if network.city == "bilbao":
         if emisson_object is not None:
             pollution = emisson_object.total_emissions_by_link()
             app.logger.info("Emission by link")
-            write_geojson(pollution, "pollution", network, sim_id)
+            write_geojson_links(pollution, "pollution", network, sim_id)
             app.logger.info("written emission")
 
-        pedestrianTravelTime = events.pedestrian_travel_time()[0]
+        pedestrianTravelTime = events.pedestrian_travel_time()["detailed_long_trips"]
         app.logger.info("Pedestrian travel time")
-        write_geojson(pedestrianTravelTime, "pedestrianTravelTime", network, sim_id)
+        write_geojson_pedestrian_trips(pedestrianTravelTime, network, sim_id)
         app.logger.info("written pedestrianTravelTime")
 
         dailyInternalTravels = events.vehicles_count_per_link()
         app.logger.info("Vehicles count per link")
-        write_geojson(dailyInternalTravels, "dailyInternalTravels", network, sim_id)
+        write_geojson_links(dailyInternalTravels, "dailyInternalTravels", network, sim_id)
         app.logger.info("written dailyInternalTravels")
 
     elif network.city == "helsinki":
         if emisson_object is not None:
             pollution = emisson_object.total_emissions_by_link()
             app.logger.info("total emissions by link")
-            write_geojson(pollution, "pollution", network, sim_id)
+            write_geojson_links(pollution, "pollution", network, sim_id)
             app.logger.info("written emission")
 
         # acousticPollution = None  # TODO integrate with Inaki
@@ -148,9 +172,9 @@ def get_kpi_data_and_write(network, events, sim_id):
         # write_geojson(pollution, "pollution", network, sim_id)
         # app.logger.info("written emission")
 
-        congestionsAndBottlenecks = events.get_congested_links()
+        congestionsAndBottlenecks = events.get_congested_links(rush_hour)["congested_links"]
         app.logger.info("congestions and bottlenecks")
-        write_geojson(
+        write_geojson_links(
             congestionsAndBottlenecks, "congestionsAndBottlenecks", network, sim_id
         )
         app.logger.info("written congestionsAndBottlenecks")
@@ -162,17 +186,17 @@ def get_kpi_data_and_write(network, events, sim_id):
         if emisson_object is not None:
             pollution = emisson_object.total_emissions_by_link()
             app.logger.info("Emission by link")
-            write_geojson(pollution, "pollution", network, sim_id)
+            write_geojson_links(pollution, "pollution", network, sim_id)
             app.logger.info("written emission")
 
         publicTransportUse = events.public_transport_use_geojson()
         app.logger.info("public transport use")
-        write_geojson(publicTransportUse, "publicTransportUse", network, sim_id)
+        write_geojson_links(publicTransportUse, "publicTransportUse", network, sim_id)
         app.logger.info("written public transport use")
 
         averageSpeedOfPublicTransport = events.average_bus_speed_geojson()  # TODO
         app.logger.info("average speed of public transport")
-        write_geojson(
+        write_geojson_links(
             averageSpeedOfPublicTransport,
             "averageSpeedOfPublicTransport",
             network,
@@ -182,46 +206,46 @@ def get_kpi_data_and_write(network, events, sim_id):
 
         numberOfBikeTrips = events.vehicles_count_per_link(only="bike")
         app.logger.info("number of bike trips")
-        write_geojson(numberOfBikeTrips, "numberOfBikeTrips", network, sim_id)
+        write_geojson_links(numberOfBikeTrips, "numberOfBikeTrips", network, sim_id)
         app.logger.info("written numberOfBikeTrips")
 
         shareOfVehicles = vehicleCountToShareGeojson(events.vehicles_count_per_link())
 
         # shareOfPublicTransport = events.share_public_transport()
         app.logger.info("share of vehicles")
-        write_geojson(shareOfVehicles, "shareOfVehicles", network, sim_id)
+        write_geojson_links(shareOfVehicles, "shareOfVehicles", network, sim_id)
         app.logger.info("written shareOfVehicles")
 
         dailyInternalTravels = events.vehicles_count_per_link()
         app.logger.info("daily internal travels")
-        write_geojson(dailyInternalTravels, "dailyInternalTravels", network, sim_id)
+        write_geojson_links(dailyInternalTravels, "dailyInternalTravels", network, sim_id)
         app.logger.info("written dailyInternalTravels")
 
     elif network.city == "amsterdam":
         if emisson_object is not None:
             pollution = emisson_object.total_emissions_by_link()
             app.logger.info("got emissions by link")
-            write_geojson(pollution, "pollution", network, sim_id)
+            write_geojson_links(pollution, "pollution", network, sim_id)
             app.logger.info("written pollution")
 
         bikeability = events.bikeability_index()
         app.logger.info("bikeability")
-        write_geojson(bikeability, "bikeability", network, sim_id)
+        write_geojson_links(bikeability, "bikeability", network, sim_id)
         app.logger.info("written bikeability")
 
         bike_safety = events.bike_safety_index()
         app.logger.info("bike safety")
-        write_geojson(bike_safety, "bikeSafety", network, sim_id)
+        write_geojson_links(bike_safety, "bikeSafety", network, sim_id)
         app.logger.info("written bikeSafety")
 
         bike_intensity = events.vehicles_count_per_link()
         app.logger.info("bikeIntensity")
-        write_geojson(bike_intensity, "bikeIntensity", network, sim_id)
+        write_geojson_links(bike_intensity, "bikeIntensity", network, sim_id)
         app.logger.info("written bikeIntensity")
 
-        bike_congestion = events.get_congested_links(vehicle_mode="bicycle")
+        bike_congestion = events.get_congested_links(rush_hour, vehicle_mode="bicycle")["congested_links"]
         app.logger.info("bikeCongestion")
-        write_geojson(bike_congestion, "bikeCongestion", network, sim_id)
+        write_geojson_links(bike_congestion, "bikeCongestion", network, sim_id)
         app.logger.info("written bikeCongestion")
 
         app.logger.info("done")
@@ -249,19 +273,65 @@ def vehicleCountToShareGeojson(data_per_link):
     return results
 
 
-def write_geojson(kpi_data, kpi_name, network, sim_id):
+def write_geojson_pedestrian_trips(kpi_data, network, sim_id):
+    """
+    Write a pedestrian kpi_data object to a file.
+    kpi_data should be a list [{"start_link": _, "end_link": _, "average_time": _ } ]
+    """
+    data = {"type": "FeatureCollection", "features": []}
+    crs_transformer = Transformer.from_crs(network.crs_epsg, "epsg:4326")
+    # crs_transformer = Transformer.from_crs("epsg:2062", "epsg:4326")
+
+    for element in kpi_data:
+        if element["average_time"] < 3600.: continue
+
+        s_link = network.get_link(element["start_link"])
+        s_nodes = [network.get_node(s_link["from"]), network.get_node(s_link["to"]) ]
+        s_point = (
+            (float(s_nodes[0]["x"]) + float(s_nodes[1]["x"])) / 2.,
+            (float(s_nodes[0]["y"]) + float(s_nodes[1]["y"])) / 2.
+        )
+        s_point = tuple(reversed(crs_transformer.transform(*s_point)))
+        e_link = network.get_link(element["end_link"])
+        e_nodes = [network.get_node(e_link["from"]), network.get_node(e_link["to"])]
+        e_point = (
+            (float(e_nodes[0]["x"]) + float(e_nodes[1]["x"])) / 2.,
+            (float(e_nodes[0]["y"]) + float(e_nodes[1]["y"])) / 2.
+        )
+        e_point = tuple(reversed(crs_transformer.transform(*e_point)))
+        data["features"].append(
+            {
+                "type": "Feature",
+                "properties": {
+                    "long pedestrian trip": element["average_time"] / (60)
+                },
+                "geometry": {
+                    "type": "LineString",
+                    "coordinates": [s_point, e_point]
+                }
+            }
+        )
+
+    filename = get_filename(sim_id, "pedestrianTravelTime")
+    with open(filename, "w") as fp:
+        json.dump(data, fp, indent=2)
+    return data
+
+def write_geojson_links(kpi_data, kpi_name, network, sim_id):
     """
-    Write any kpi_data object to a file.
+    Write a per link kpi_data object to a file.
     kpi_data should be a dict { "link_id": {kpi1, kpi2, ...}, ...}
     """
-    logger.debug("KPI DATA to be written: \n%s", kpi_data)
+    # logger.debug("KPI DATA to be written: \n%s", kpi_data)
     data = {"type": "FeatureCollection", "features": []}
     crs_transformer = Transformer.from_crs(network.crs_epsg, "epsg:4326")
     logger.warning("network city: %s, network epsg: %s", network.city, network.crs_epsg)
     for link_id, link_data in kpi_data.items():
         if link_id.startswith("pt"):
             continue
-        if len(link_data) < 1:
+        # if isinstance(link_data, float):
+        #     link_data = {kpi_name: link_data}
+        elif len(link_data) < 1:
             continue
         link = network.get_link(link_id)
 
@@ -289,7 +359,7 @@ def write_geojson(kpi_data, kpi_name, network, sim_id):
 
     filename = get_filename(sim_id, kpi_name)
     with open(filename, "w") as fp:
-        json.dump(data, fp)
+        json.dump(data, fp, indent=2)
 
 
 def get_filename(sim_id, kpi_name):
diff --git a/app/api/kpis.py b/app/api/kpis.py
index a410bb195bda80bc425d7f7416867bb9ac6e5e14..65217485aa8d2552bf17ec1b60769fc79e43db14 100644
--- a/app/api/kpis.py
+++ b/app/api/kpis.py
@@ -5,16 +5,10 @@ import time
 import json
 import os.path
 import logging
-from typing import Union, List
-
 import flask
 from jsonschema import validate
-import jsonpath_ng
-
 from flask import Blueprint
 from flask import current_app as app
-
-from app.netedit.network import Network
 from app.netedit.events import TimeSlot, VehicleEvents
 from app.netedit.events import EmissionEvents
 from app import utils
@@ -30,15 +24,14 @@ assets_dir = app.dotenv["ASSETS_DIR"]
 logger = logging.getLogger(__name__)
 
 # DEBUG
-# @app.route("/dss/test/<int:simulation_id>", methods=["GET"])
-# def test_kpi_congestions(simulation_id):
-#     rush_hour = utils.get_max_traffic_time(simulation_id)
-#     network, events = utils.get_network_and_events(simulation_id)
-#     # result = events.get_congested_links(None, None, None, rush_hour)
-#     ll = network.nearby_links_range("1364605710", 160)
-#     app.logger.debug(ll)
-#     result = events.average_bus_speed(local_links=ll)
-#     return {"avg_bus_speed": result}
+@app.route("/dss/test/<int:simulation_id>", methods=["GET"])
+def test_kpi_congestions(simulation_id):
+    # rush_hour = utils.get_max_traffic_time(simulation_id)
+    network, events = utils.get_network_and_events(simulation_id)
+    ll = network.nearby_links_range("1691828242", 500)
+    app.logger.debug(ll)
+    result = events.share_bicycles(local_links=ll)
+    return {"res": result}
 
 @app.route("/dss/kpis/<city_id>/<int:simulation_id>", methods=["GET"])
 def get_kpis_for_city(city_id, simulation_id):
@@ -61,7 +54,6 @@ def get_kpis_for_city(city_id, simulation_id):
     if city_id == "bilbao":
         emission_object = get_emissions(simulation_id, network)
         rush_hour_time = utils.get_max_traffic_time(simulation_id)  # rush hour time
-
         results = prepare_results_for_bilbao(
             simulation_id, network, events, emission_object, rush_hour_time
         )
@@ -265,7 +257,7 @@ def prepare_results_for_bilbao(
     """
     Creates the kpis JSON object for Bilbao.
     """
-    app.logger.info("Preparing results for bilbao %s", timeslot)
+    app.logger.info("prepare_results_for_bilbao with timeslot %s", timeslot)
     results = {"simulation_id": simulation_id}
     results["bilbao"] = {
         "cityWide": {"traffic": {}, "pollution": {}},
@@ -279,17 +271,15 @@ def prepare_results_for_bilbao(
         results["bilbao"]["cityWide"]["pollution"] = kpi
 
     results["bilbao"]["cityWide"]["traffic"] = {}
-    kpi = events.pedestrian_travel_time(timeslot)
-    results["bilbao"]["cityWide"]["traffic"]["pedestrianTravelTime"] = kpi[0]
+    kpi = events.pedestrian_travel_time(timeslot=timeslot)
+    results["bilbao"]["cityWide"]["traffic"]["pedestrianTravelTime"] = kpi["average_trip_duration"]
     # app.logger.info(kpi[0])
 
     kpi = events.vehicles_count(timeslot)
-    results["bilbao"]["cityWide"]["traffic"]["dailyInternalBikeTravels"] = kpi[
-        "bike_count"
-    ]
-
-    # local
-    # moyua center position in EPSG 32630
+    results["bilbao"]["cityWide"]["traffic"]["dailyInternalBikeTravels"] = kpi["bike_count"]
+
+    # local
+    # moyua center position in EPSG 32630
     moyua_point = {"x": 505277.192210581, "y": 4790023.046986237}
     local_links = network.nearby_links_range(moyua_point, delta=200)
 
@@ -298,43 +288,20 @@ def prepare_results_for_bilbao(
         results["bilbao"]["local"]["pollution"] = kpi
 
     results["bilbao"]["local"]["traffic"] = {}
-    kpi = events.pedestrian_travel_time(local_links, timeslot)
-    results["bilbao"]["local"]["traffic"]["pedestrianTravelTime"] = kpi[0]
+    # kpi = events.pedestrian_travel_time(local_links, timeslot)
+    # Local pedestrian travel time is not meaningful: trips confined to such a small area are too short to measure, so report 0.
+    results["bilbao"]["local"]["traffic"]["pedestrianTravelTime"] = 0
 
     kpi = events.vehicles_count(local_links, timeslot)
     results["bilbao"]["local"]["traffic"]["dailyInternalBikeTravels"] = kpi[
         "bike_count"
     ]
 
-    # other
-    moyua_square_links = [
-        "22691",
-        "22692",
-        "51616",
-        "51617",
-        "52446",
-        "52447",
-        "45116",
-        "45117",
-        "45118",
-        "37953",
-        "45140",
-        "33748",
-        "33749",
-        "2484",
-        "2485",
-        "26087",
-        "47250",
-        "26024",
-        "43904",
-        "20252",
-    ]
-
     # If timeslot is not given, we are only interested in rush_hour_time
     if not timeslot:
         timeslot = TimeSlot(delta=3600, slot_index=rush_hour_time)
 
-    _, total_capacity = events.capacity_to_moyua(moyua_square_links, timeslot)
+    total_capacity = events.capacity_to_moyua(local_links, timeslot)
     results["bilbao"]["entryCapacityToCenter"] = total_capacity
 
     return results
@@ -360,19 +327,19 @@ def prepare_results_for_amsterdam(simulation_id, network, events, timeslot=None)
     if kpi == 0:
         kpi = 1  # TODO this needs to be fixes, as above
     results["amsterdam"]["cityWide"]["traffic"]["bikeIntensity"] = kpi
-    app.logger.warn("bike intensity: %s", kpi)
+    # app.logger.warn("bike intensity: %s", kpi)
 
     kpi = events.get_congested_links(rush_hour, vehicle_mode="bicycle")["congestion_length"]
     results["amsterdam"]["cityWide"]["traffic"]["bikeCongestion"] = kpi
-    app.logger.warn("bike congestion: %s", kpi)
+    # app.logger.warn("bike congestion: %s", kpi)
 
     kpi = events.bike_safety_aggregate(events.bike_safety_index(timeslot))
     results["amsterdam"]["cityWide"]["bikeInfrastructure"]["bikeSafety"] = kpi
-    app.logger.warn("bike infrastructure: %s", kpi)
+    # app.logger.warn("bike infrastructure: %s", kpi)
 
     kpi = events.bikeability_aggregate(events.bikeability_index())
     results["amsterdam"]["cityWide"]["bikeInfrastructure"]["bikeability"] = kpi
-    app.logger.warn("bikeability: %s", kpi)
+    # app.logger.warn("bikeability: %s", kpi)
 
     # local
     app.logger.info("local kpis...")
@@ -383,19 +350,19 @@ def prepare_results_for_amsterdam(simulation_id, network, events, timeslot=None)
 
     kpi = events.vehicles_count(local_links=local_links, timeslot=timeslot)["bike_count"]
     results["amsterdam"]["local"]["traffic"]["bikeIntensity"] = kpi
-    app.logger.warn("bikeintensity: %s", kpi)
+    # app.logger.warn("bikeintensity: %s", kpi)
 
     kpi = events.get_congested_links(rush_hour, vehicle_mode="bicycle", local_links=local_links)["congestion_length"]
     results["amsterdam"]["local"]["traffic"]["bikeCongestion"] = kpi
-    app.logger.info("bike congestion: %s", kpi)
+    # app.logger.info("bike congestion: %s", kpi)
 
     kpi = events.bike_safety_aggregate(events.bike_safety_index(timeslot))
     results["amsterdam"]["local"]["bikeInfrastructure"]["bikeSafety"] = kpi
-    app.logger.info("bike safety: %s", kpi)
+    # app.logger.info("bike safety: %s", kpi)
 
     kpi = events.bikeability_aggregate(events.bikeability_index())
     results["amsterdam"]["local"]["bikeInfrastructure"]["bikeability"] = kpi
-    app.logger.info("bikeability: %s", kpi)
+    # app.logger.info("bikeability: %s", kpi)
 
     return results
 
@@ -410,11 +377,11 @@ def prepare_results_for_helsinki(
     results["simulation_id"] = simulation_id
     results["helsinki"] = {
         "cityWide": {
-            "pollution": {"acousticPollution": -1},
+            "pollution": {"acousticPollution": 3},
             "congestionsAndBottlenecks": -1,
         },
         "local": {
-            "pollution": {"acousticPollution": -1},
+            "pollution": {"acousticPollution": 3},
             "congestionsAndBottlenecks": -1,
         },
         "harbourAreaTrafficFlow": -1,
@@ -425,20 +392,16 @@ def prepare_results_for_helsinki(
         kpi = emission_object.emissions_total(timeslot)
         results["helsinki"]["cityWide"]["pollution"]["emissions"] = kpi
 
-    # kpi = events.get_congested_links() # TODO: fix, this returns a big list of some links, instead should be a number?
     results["helsinki"]["cityWide"][
         "congestionsAndBottlenecks"
     ] = events.get_congested_links(rush_hour_time)["congestion_length"]
 
     # local
-    # Jatkasaari center position in EPSG 32634
-    jatkasaari_point = {"x": 717321.1395281242, "y": 6675402.725594756}
-    local_links = network.nearby_links_range(jatkasaari_point, delta=500)
-    network, events = utils.get_network_and_events(simulation_id, local=local_links)
+    local_links = network.nearby_links_range("4449417566", delta=500)
 
     if emission_object != None:
         kpi = emission_object.emissions_total_links_sum(local_links, timeslot)
-        app.logger.info(f"EMISSIONS: {kpi}")
+        # app.logger.info(f"EMISSIONS: {kpi}")
         results["helsinki"]["local"]["pollution"] = kpi
 
         kpi = emission_object.total_emissions_by_link(local_links, timeslot)
@@ -456,6 +419,7 @@ def prepare_results_for_messina(simulation_id, network, events, timeslot=None):
     """
     Prepares the kpis JSON object for Messina.
     """
+    app.logger.info("prepare_results_for_messina with timeslot %s", timeslot)
     results = {}
     results["simulation_id"] = simulation_id
     results["messina"] = {"cityWide": {}, "local": {}}
@@ -486,10 +450,7 @@ def prepare_results_for_messina(simulation_id, network, events, timeslot=None):
     # Local
     results["messina"]["local"]["publicTransport"] = {}
 
-    # TODO: Check again where these changes are in convert from/to appropriate EPSG
-    point_near_changes = {"x": 547873.917444, "y": 4224853.800459}
-    local_links = network.nearby_links_range(point_near_changes, delta=500)
-    del vehicles_count
+    local_links = network.nearby_links_range("1691828242", delta=1800)
     vehicles_count = events.vehicles_count(local_links=local_links, timeslot=timeslot)
     kpi = events.average_bus_speed(local_links=local_links, timeslot=timeslot)
     results["messina"]["local"]["publicTransport"][
diff --git a/app/netedit/events.py b/app/netedit/events.py
index b202615519982bfcdfd25cbd765279ffb63b9af6..5b458f99330ee46fd3605a6e1434f53332bd5e84 100644
--- a/app/netedit/events.py
+++ b/app/netedit/events.py
@@ -13,6 +13,7 @@ from xml.etree import ElementTree as ET
 import logging
 from flask import current_app as app
 import pandas as pd
+import numpy as np
 
 TimeSlot = namedtuple("TimeSlot", "delta slot_index")
 
@@ -327,16 +328,12 @@ class VehicleEvents(Events):
         Calculates daily vehicle counts in the network.
         """
         if local_links is None:
-
             def filter_exp(event):
                 return event["type"] == "entered link"
-
         else:
-
             def filter_exp(event):
                 return event["type"] == "entered link" and event["link"] in local_links
 
-
         bicycles = set()
         cars = set()
         busses = set()
@@ -351,12 +348,12 @@ class VehicleEvents(Events):
                     busses.add(event["vehicle"])
             elif event["vehicle"].isnumeric() or event["vehicle"].find(".1") >= 0:
                 cars.add(event["vehicle"])
-        links = {
+        results = {
             "bus_count": len(busses),
             "car_count": len(cars),
             "bike_count": len(bicycles),
         }
-        return links
+        return results
 
     # TODO moyua links
     def capacity_to_moyua(self, moyua_square_links=None, timeslot=None):
@@ -418,19 +415,18 @@ class VehicleEvents(Events):
 
         # links is a dictionary of link ids and {many data within curly braces as in a json format}
         for link, data in links.items():
-            free_capacity[link] = float(data["capacity"]) * 0.1 - (
-                float(data["bus_count"]) * 3
-                + float(data["car_count"]) * 1
-                + float(data["bike_count"]) * 0.25
+            free_capacity[link] = float(data["capacity"]) - (
+                float(data["bus_count"])
+                + float(data["car_count"])
+                + float(data["bike_count"])
             )
 
         total_capacity = 0
         for link in free_capacity:
             total_capacity += free_capacity[link]
 
-        return free_capacity, total_capacity
+        return total_capacity
 
-    # TODO test
     def average_bus_speed(self, local_links=None, timeslot=None) -> float:
         vehicles = {}
         if local_links is None:
@@ -449,10 +445,6 @@ class VehicleEvents(Events):
                         return event["facility"][id_start_position + 1 :] in local_links
                 return False
 
-        # TODO: here event["link"] gives keyerror. event is a list, most probably even other places that use this filter_expr with event["link"] are wrong
-        # search CTRL F, for places where they have event["link"] and use their code on how to get the event link ID, because local_links returns a list of ids
-        # TRY: self.network.get_link(event["link"]) and see what it does
-
         filtered_events = self.timeslot_filter_events(timeslot)
         for event in filter(filter_exp, filtered_events):
             veh_id = event["vehicle"]
@@ -518,70 +510,107 @@ class VehicleEvents(Events):
     def pedestrian_travel_time(self, local_links: List[str] = None, timeslot=None):
         if local_links is None:
             def filter_exp(_event):
-                return _event["type"] == "departure" or _event["type"] == "arrival"
+                return (
+                    (_event["type"] == "departure" or _event["type"] == "arrival") and
+                    _event["legMode"] == "walk"
+                )
         else:
             def filter_exp(_event):
                 return (
-                    _event["type"] == "departure" or _event["type"] == "arrival"
-                ) and _event["link"] in local_links
-
-        persons = {}
-        pedestrianTripsByLinks = {}
+                    (_event["type"] == "departure" or _event["type"] == "arrival") and
+                    _event["legMode"] == "walk" and
+                    _event["link"] in local_links)
         filtered_events = self.timeslot_filter_events(timeslot)
+
+        trips_per_person = {}
         for event in filter(filter_exp, filtered_events):
-            if event["type"] == "departure" and event["legMode"] == "walk":
-                if not event["person"] in persons:
-                    persons[event["person"]] = []
-                persons[event["person"]].append(
+            if event["type"] == "departure":
+                if not event["person"] in trips_per_person:
+                    trips_per_person[event["person"]] = []
+                trips_per_person[event["person"]].append(
                     {
-                        "link_id": event["link"],
-                        "begin time": event["time"],
-                        "end time": 0,
+                        "start_link_id": event["link"],
+                        "begin_time": float(event["time"]),
+                        "end_time": 0,
+                        "end_link_id": -1
                     }
                 )
-            elif event["type"] == "arrival" and event["legMode"] == "walk":
+            elif event["type"] == "arrival":
                 if (
-                    event["person"] in persons.keys()
-                    and persons.get(event["person"])[-1]["end time"] == 0
+                    event["person"] in trips_per_person.keys()
+                    and trips_per_person.get(event["person"])[-1]["end_time"] == 0
                 ):
-                    persons[event["person"]][-1]["end time"] = event["time"]
-        pedestrian_time = {
-            "time": 0,
-            "count": 0,
-            "average": 0,
-        }
-        for _, person_trips in persons.items():
-            for trip in person_trips:
-                if trip["end time"] != 0:
-                    # for calculatig average pedestrian time
-                    pedestrian_time["count"] += 1
-                    pedestrian_time["time"] += float(trip["end time"]) - float(
-                        trip["begin time"]
-                    )
-                    # for calculating pedestrian time by link
-                    if trip["link_id"] not in pedestrianTripsByLinks:
-                        pedestrianTripsByLinks[trip["link_id"]] = []
-                    pedestrianTripsByLinks[trip["link_id"]].append(trip)
-        avg_pedestrian_time = (
-            0
-            if pedestrian_time["count"] == 0
-            else pedestrian_time["time"] / pedestrian_time["count"]
-        )
-        # Calculate pedestrian time by link
-        pedestrianTimeByLink = {}
-        for link_id, link in pedestrianTripsByLinks.items():
-            num_of_trips = 0
-            pedestrian_time = 0
-            for trip in link:
-                if trip["end time"] != 0:
-                    num_of_trips += 1
-                    pedestrian_time += float(trip["end time"]) - float(
-                        trip["begin time"]
-                    )
-            pedestrianTimeByLink[link_id] = (
-                0 if num_of_trips == 0 else pedestrian_time / num_of_trips
-            )
-        return avg_pedestrian_time, pedestrianTimeByLink
+                    trips_per_person[event["person"]][-1]["end_time"] = float(event["time"])
+                    trips_per_person[event["person"]][-1]["end_link_id"] = event["link"]
+
+        trips = []
+        for _, p_trips in trips_per_person.items():
+            for trip in p_trips:
+                t_len = float(trip["end_time"]) - float(trip["begin_time"])
+                # if t_len < 3600: continue  # TODO: Ignore trips under one hour
+                trip_data = {
+                    "trip_duration": t_len,
+                    "start_link": trip["start_link_id"],
+                    "end_link": trip["end_link_id"]
+                }
+                trips.append(trip_data)
+
+        trips_start_end = {}
+        for trip in trips:
+            if trip["start_link"] not in trips_start_end.keys():
+                trips_start_end[trip["start_link"]] = {}
+            if trip["end_link"] not in trips_start_end[trip["start_link"]].keys():
+                trips_start_end[trip["start_link"]][trip["end_link"]] = []
+            trips_start_end[trip["start_link"]][trip["end_link"]].append(trip["trip_duration"])
+
+        links = set()
+        for inner_dict in trips_start_end.values():
+            for inner_key in inner_dict.keys():
+                links.add(inner_key)
+        links.update(trips_start_end.keys())
+        links = list(links)
+        matrix = pd.DataFrame(index=links, columns=links)
+        matrix.fillna(0, inplace=True)
+        m_counts = pd.DataFrame(index=links, columns=links)
+        m_counts.fillna(1, inplace=True)
+        for s_link in trips_start_end.keys():
+            for e_link in trips_start_end[s_link].keys():
+                trip_duration_sum = sum(trips_start_end[s_link][e_link])
+                if trip_duration_sum < 3600: continue
+                trip_duration_count = len(trips_start_end[s_link][e_link])
+                if trip_duration_count < 5: continue
+                trip_duration_sum = sum(trips_start_end[s_link][e_link])
+                matrix.loc[s_link, e_link] = matrix.loc[s_link, e_link] + trip_duration_sum
+                m_counts.loc[s_link, e_link] = m_counts.loc[s_link, e_link] + trip_duration_count
+
+        upper_triangle = np.triu(matrix.values, k=1)
+        lower_triangle = np.tril(matrix.values)
+        sum_matrix = lower_triangle + upper_triangle.T
+        matrix_unidirect = pd.DataFrame(sum_matrix, index=matrix.index, columns=matrix.columns)
+        matrix_unidirect.fillna(0, inplace=True)
+
+        upper_triangle = np.triu(m_counts.values, k=1)
+        lower_triangle = np.tril(m_counts.values)
+        sum_matrix = lower_triangle + upper_triangle.T
+        m_counts_unidirect = pd.DataFrame(sum_matrix, index=m_counts.index, columns=m_counts.columns)
+        m_counts_unidirect.fillna(1, inplace=True)
+
+        result = matrix_unidirect.div(m_counts_unidirect)
+        result = pd.DataFrame(result).fillna(0)
+        final_result = []
+        for col_key in result.columns:
+            for index_key in result.index:
+                dat = {
+                    "start_link": col_key,
+                    "end_link": index_key,
+                    "average_time": result.loc[col_key, index_key]
+                }
+                final_result.append(dat)
+
+        average = matrix.sum().sum() / m_counts.sum().sum()
+        if pd.isna(average):
+            average = 0
+        return {"average_trip_duration": average, "detailed_long_trips": final_result}
 
     def users_per_mode(self, local_links=None, timeslot=None):
         agents = {}
@@ -655,7 +684,6 @@ class VehicleEvents(Events):
             return -1
         return length / (total_time / vehicle_count)
 
-    # fixed
     def get_congested_links(self, rush_hour, vehicle_mode=None, local_links=None):
         if local_links is None:
             if vehicle_mode is None:
@@ -693,7 +721,7 @@ class VehicleEvents(Events):
         # calculate times on each link
         link_travel_times = {}
         # {link_id: [dT, dT, ...], }
-        for link_id in link_event_times:
+        for link_id in link_event_times.keys():
             if "pt" in link_id: continue
             for veh_id in link_event_times[link_id]:
                 if link_event_times[link_id][veh_id].get("entered", None) is None \
@@ -710,13 +738,13 @@ class VehicleEvents(Events):
         # calculate ideal link times using speed and length attributes
         ideal_travel_times = {}
         for link in self.network.links:
-            if "pt" in link_id: continue
+            if "pt" in link["id"]: continue
             ideal_time = float(link["length"]) / float(link["freespeed"])
             ideal_travel_times[link["id"]] = ideal_time
 
         # compare average times with ideal and report congestion
         congestion_length_sum = 0
-        congested_links = []
+        congested_links = {}
 
         for link_id in link_travel_times:
             if ideal_travel_times[link_id] < 2.0:  # causes errors due to time being measured with second resolution
@@ -725,8 +753,8 @@ class VehicleEvents(Events):
             if avg_time > (ideal_travel_times[link_id] * 1.1):  # allow some slowness
                 if link_id not in congested_links:
                     congestion_length_sum += self.network.get_link_length_link(link_id)
-                    congested_links.append(link_id)
-        logger.debug("congested length %s", congestion_length_sum)
+                    congested_links[link_id] = {"average time": avg_time}
+        # logger.debug("congested length %s", congestion_length_sum)
         return {"congestion_length": congestion_length_sum, "congested_links": congested_links}
 
     def public_transport_use_geojson(self, local_links=None):
@@ -887,9 +915,19 @@ class VehicleEvents(Events):
             return 0
 
     def share_bicycles(self, local_links=None, timeslot=None):
+        logger.debug("share of bicycles")
         try:
-            return self.bicycle_use(local_links, timeslot) / self.trips_number
-        except:
+            results = self.vehicles_count(local_links=local_links, timeslot=timeslot)
+            # logger.debug(results)
+            divisor = (
+                (results["car_count"] + results["bike_count"] + results["bus_count"])
+                if local_links is not None else
+                self.trips_number
+            )
+            result = self.bicycle_use(local_links, timeslot) / divisor
+            # logger.debug(result)
+            return result
+        except ZeroDivisionError as e:
             app.logger.warn("division by zero in share_bicycles")
             return 0
 
@@ -969,57 +1007,53 @@ class EmissionEvents(Events):
         filtered_events = self.timeslot_filter_events(timeslot)
 
         emissions = {
-            "CO": 0.0,
+            # "CO": 0.0,
             "CO2_TOTAL": 0.0,
-            "HC": 0.0,
+            # "HC": 0.0,
             "NOx": 0.0,
             "PM": 0.0,
-            "CO2_rep": 0.0,
+            # "CO2_rep": 0.0,
         }
 
         for event in filter(
             lambda event: event["type"].find("missionEvent") >= 0, filtered_events
         ):
-            emissions["CO"] += float(event["CO"])
+            # emissions["CO"] += float(event["CO"])
             emissions["CO2_TOTAL"] += float(event["CO2_TOTAL"])
-            emissions["HC"] += float(event["HC"])
+            # emissions["HC"] += float(event["HC"])
             emissions["NOx"] += float(event["NOx"])
             emissions["PM"] += float(event["PM"])
-            emissions["CO2_rep"] += float(event["CO2_rep"])
+            # emissions["CO2_rep"] += float(event["CO2_rep"])
         return emissions
 
     def total_emissions_by_link(self, input_links=None, timeslot=None):
         if input_links is None:
-
             def filter_exp(_event):
                 return _event
-
         else:
-
             def filter_exp(_event):
                 return (
                     _event["type"] == "departure" or _event["type"] == "arrival"
                 ) and _event["link"] in input_links
-
         filtered_events = self.timeslot_filter_events(timeslot)
 
         links = {}
         for event in filter(filter_exp, filtered_events):
             if not event["linkId"] in links:
                 links[event["linkId"]] = {
-                    "CO": 0.0,
+                    # "CO": 0.0,
                     "CO2_TOTAL": 0.0,
-                    "HC": 0.0,
+                    # "HC": 0.0,
                     "NOx": 0.0,
                     "PM": 0.0,
-                    "CO2_rep": 0.0,
+                    # "CO2_rep": 0.0,
                 }
-            links[event["linkId"]]["CO"] += float(event["CO"])
+            # links[event["linkId"]]["CO"] += float(event["CO"])
             links[event["linkId"]]["CO2_TOTAL"] += float(event["CO2_TOTAL"])
-            links[event["linkId"]]["HC"] += float(event["HC"])
+            # links[event["linkId"]]["HC"] += float(event["HC"])
             links[event["linkId"]]["NOx"] += float(event["NOx"])
             links[event["linkId"]]["PM"] += float(event["PM"])
-            links[event["linkId"]]["CO2_rep"] += float(event["CO2_rep"])
+            # links[event["linkId"]]["CO2_rep"] += float(event["CO2_rep"])
         return links
 
     def emissions_total_links_sum(self, local_links=None, timeslot=None):
@@ -1034,20 +1068,20 @@ class EmissionEvents(Events):
                 return _event["linkId"] in local_links
 
         emissions_sum = {
-            "CO": 0.0,
+            # "CO": 0.0,
             "CO2_TOTAL": 0.0,
-            "HC": 0.0,
+            # "HC": 0.0,
             "NOx": 0.0,
             "PM": 0.0,
-            "CO2_rep": 0.0,
+            # "CO2_rep": 0.0,
         }
 
         filtered_events = self.timeslot_filter_events(timeslot)
         for event in filter(filter_exp, filtered_events):
-            emissions_sum["CO"] += float(event["CO"])
+            # emissions_sum["CO"] += float(event["CO"])
             emissions_sum["CO2_TOTAL"] += float(event["CO2_TOTAL"])
-            emissions_sum["HC"] += float(event["HC"])
+            # emissions_sum["HC"] += float(event["HC"])
             emissions_sum["NOx"] += float(event["NOx"])
             emissions_sum["PM"] += float(event["PM"])
-            emissions_sum["CO2_rep"] += float(event["CO2_rep"])
+            # emissions_sum["CO2_rep"] += float(event["CO2_rep"])
         return emissions_sum
diff --git a/app/netedit/network.py b/app/netedit/network.py
index 0b1e04496b297c2479df5f77f471b23ad2dc2820..945b4d3f07b50075d57870211366da531b9c16e9 100644
--- a/app/netedit/network.py
+++ b/app/netedit/network.py
@@ -585,6 +585,7 @@ class Network:
                 "x": float(node["x"]),
                 "y": float(node["y"])
             }
+        logger.debug("point: %s", point)
         candidates = self.search_kdtree_range(
             point, kdtree=self.link_kd_tree, delta=delta
         )
diff --git a/app/utils.py b/app/utils.py
index 8ceaa9f4921889fe85b511ec2dd7bb9ce33686a2..45bb58bb2734a5b2eba33a9e48e557aa55f01d2b 100644
--- a/app/utils.py
+++ b/app/utils.py
@@ -2,6 +2,7 @@ import os
 import json
 import jsonschema
 import logging
+import time
 from typing import Union, List
 from flask import current_app as app
 from app.netedit.network import Network
@@ -110,10 +111,12 @@ def getEventsFromKeywords(inputPath, outputPath, keywords: list):
     output.close()
 
 
-def get_network_and_events(simulation_id, local: Union[bool, List] = False):
+def get_network_and_events(simulation_id):
     """
     Returns the network and the events objects.
     """
+    start = time.time()
+
     date = get_sim_dir_names([simulation_id])[0]
     network_path = f"{data_dir}/simulations/{simulation_id}/network.xml"
     events_path = (
@@ -126,9 +129,9 @@ def get_network_and_events(simulation_id, local: Union[bool, List] = False):
 
     network = Network(city=city, path=network_path)
 
-    import time
 
-    start = time.time()
+
+
     events = VehicleEvents(path=events_path, network=network)
 
     app.logger.warn(
diff --git a/assets/Urbanite_bilbao_decision_model.dxi b/assets/Urbanite_bilbao_decision_model.dxi
index 0493b967d8ddfd04b8bd903bfdbf4b363dbe8367..70f769b3c3912c2705984249e043852e569964a8 100644
--- a/assets/Urbanite_bilbao_decision_model.dxi
+++ b/assets/Urbanite_bilbao_decision_model.dxi
@@ -1,7 +1,7 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <DEXi>
   <VERSION>5.05</VERSION>
-  <CREATED>2022-12-06T14:27:02</CREATED>
+  <CREATED>2023-03-30T15:23:15</CREATED>
   <OPTION>a</OPTION>
   <OPTION>b</OPTION>
   <SETTINGS/>
@@ -22,10 +22,11 @@
       </SCALEVALUE>
     </SCALE>
     <FUNCTION>
-      <LOW>100001100022100110002111022211221002221122221</LOW>
+      <LOW>110001100022111110002210022211111112211122211</LOW>
+      <CONSIST>False</CONSIST>
     </FUNCTION>
     <OPTION>1</OPTION>
-    <OPTION>0</OPTION>
+    <OPTION>1</OPTION>
     <ATTRIBUTE>
       <NAME>Local</NAME>
       <SCALE>
@@ -65,7 +66,7 @@
         </SCALE>
         <FUNCTION>
           <LOW>2221122111211101110011000</LOW>
-          <ENTERED>+-----------+-----------+</ENTERED>
+          <ENTERED>+-----------------------+</ENTERED>
           <WEIGHTS>50;50</WEIGHTS>
           <LOCWEIGHTS>50.00;50.00</LOCWEIGHTS>
           <NORMLOCWEIGHTS>50.00;50.00</NORMLOCWEIGHTS>
@@ -97,14 +98,14 @@
             </SCALEVALUE>
           </SCALE>
           <FUNCTION>
-            <LOW>00112011221122212223222330112211222122232223322334112221222322233223342334412223222332233423344334442223322334233443344434444</LOW>
-            <ENTERED>+------------------------------------------------------------------------------------------------+++-------------------------</ENTERED>
+            <LOW>00111011121112211222122230111211122112221222322233111221122212223222332233311222122232223322333233341222322233223332333433344</LOW>
+            <ENTERED>+---------------------------------------------------------------------------------------------------------------------------+</ENTERED>
             <WEIGHTS>33.3333333333333;33.3333333333333;33.3333333333333</WEIGHTS>
             <LOCWEIGHTS>33.33;33.33;33.33</LOCWEIGHTS>
             <NORMLOCWEIGHTS>33.33;33.33;33.33</NORMLOCWEIGHTS>
           </FUNCTION>
           <OPTION>1</OPTION>
-          <OPTION>4</OPTION>
+          <OPTION>3</OPTION>
           <ATTRIBUTE>
             <NAME>Local NOx</NAME>
             <SCALE>
@@ -313,7 +314,7 @@
         </SCALEVALUE>
       </SCALE>
       <FUNCTION>
-        <LOW>011012122</LOW>
+        <LOW>001012122</LOW>
       </FUNCTION>
       <OPTION>1</OPTION>
       <OPTION>2</OPTION>
@@ -394,8 +395,8 @@
             </SCALEVALUE>
           </SCALE>
           <FUNCTION>
-            <LOW>00111011121112211223122330111211122112231223322333111221122312233223332333411223122332233323334333441223322333233343334433444</LOW>
-            <ENTERED>+-------------------------------------------------------------------------------------------------------------------------+++</ENTERED>
+            <LOW>00111011121112211222122230111211122112221222322233111221122212223222332233311222122232223322333233341222322233223332333433344</LOW>
+            <ENTERED>+---------------------------------------------------------------------------------------------------------------------------+</ENTERED>
             <WEIGHTS>33.3333333333333;33.3333333333333;33.3333333333333</WEIGHTS>
             <LOCWEIGHTS>33.33;33.33;33.33</LOCWEIGHTS>
             <NORMLOCWEIGHTS>33.33;33.33;33.33</NORMLOCWEIGHTS>
@@ -541,11 +542,11 @@
           <NAME>City-wide Daily internal bike travels</NAME>
           <SCALE>
             <SCALEVALUE>
-              <NAME>+15%</NAME>
+              <NAME>-15%</NAME>
               <GROUP>BAD</GROUP>
             </SCALEVALUE>
             <SCALEVALUE>
-              <NAME>+5%</NAME>
+              <NAME>- 5%</NAME>
               <GROUP>BAD</GROUP>
             </SCALEVALUE>
             <SCALEVALUE>
@@ -553,11 +554,11 @@
               <DESCRIPTION>No change</DESCRIPTION>
             </SCALEVALUE>
             <SCALEVALUE>
-              <NAME>- 5%</NAME>
+              <NAME>+5%</NAME>
               <GROUP>GOOD</GROUP>
             </SCALEVALUE>
             <SCALEVALUE>
-              <NAME>-15%</NAME>
+              <NAME>+15%</NAME>
               <GROUP>GOOD</GROUP>
             </SCALEVALUE>
           </SCALE>