Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision
  • master
1 result

Target

Select target project
  • urbanite/open/traffic-simulation-dss
1 result
Select Git revision
  • master
1 result
Show changes
Commits on Source (10)
# city this instance is deployed for
URBANITE_CITY="amsterdam"
# UI settings for default map position and zoom
CITY_COORDINATES="52.3676,4.9041" # Amsterdam
# CITY_COORDINATES="43.2630,-2.9350" # Bilbao
# CITY_COORDINATES="60.1566, 24.9118" # Helsinki
# CITY_COORDINATES="38.1937, 15.5542" # Messina
CITY_ZOOM_LEVEL="15"
# server settings
DSS_PORT=8083
FLASK_SECRET_KEY="secret"
......
......@@ -8,6 +8,8 @@ import csv
import os
import logging
import random
from spwd import struct_spwd
import pandas as pd
import types
from typing import List, Dict
......@@ -42,15 +44,18 @@ def dexi_eval_json(city_id=None):
if city_id is None:
return {"message": "Error on frontend - city_id not specified"}
baseline_id = request.get_json()["baseline"]
compare_id = request.get_json()["compare"]
baseline_id = 1
compare_id = request.get_json()["compare"] # changed because we always have baseline as 1 on the UI, dropdowns to be removed
compare_simulation_name = get_compare_simulation_name(compare_id)
baseline_json, compare_json, _ = preprocess_kpi_jsons(baseline_id, compare_id)
# running dexi here normally
dexi_output, simulation_dirs = prepare_and_run_dexi(compare_json, city_id, compare_simulation_name,
baseline_id, compare_id, "temp")
write_results(dexi_output, baseline_id, simulation_dirs[0])
dexi_output = postprocess_dexi_result(dexi_output, baseline_json)
app.logger.debug("postprocessed \n%s", json.dumps(dexi_output, indent=2))
write_results(dexi_output, compare_id, simulation_dirs[1])
return {"success": True}
......@@ -77,10 +82,22 @@ def recommendation_analysis(city_id=None):
dexi_output, simulation_dirs = prepare_and_run_dexi(plus_one_json, city_id, compare_simulation_name,
baseline_id, compare_id, folder_name)
write_results(dexi_output, baseline_id, simulation_dirs[0])
write_results(dexi_output, compare_id, simulation_dirs[1])
# Remark: first we need to run the normal dexi pipeline, so we can create the temp folder. Then later we need to call the recommendation analysis function
compare_dexi_results(f'{data_dir}/temp/', f'{data_dir}/{folder_name}/', changed_element, folder_name, step_size)
# running dexi here normally
# Doing this because if we run recommendation engine after running runDSS on the front end, it will change the evaluated.json files
# and next time we open the UI we would have incorrect spider chart
compare_simulation_name = get_compare_simulation_name(compare_id)
baseline_json, compare_json, _ = preprocess_kpi_jsons(1, compare_id)
dexi_output, simulation_dirs = prepare_and_run_dexi(compare_json, city_id, compare_simulation_name,
1, compare_id, "temp")
dexi_output = postprocess_dexi_result(dexi_output, baseline_json)
write_results(dexi_output, compare_id, simulation_dirs[1])
return {"success": True}
@app.route("/dss/general_recommendation", methods=["GET"])
......@@ -167,10 +184,12 @@ def create_recommendation_text(kpi, amount_changed, caused_by, step_size):
return ""
if 'co2' in caused_by.replace(' ', '').lower() and 'co2' in kpi.replace(' ', '').lower():
return ""
return f'In order to improve the KPI of {kpi} by {int(amount_changed)*10}%, {caused_by} should be improved by {step_size*10}%\n'
if amount_changed < 0:
amount_changed *= -1
return f'In order to change the KPI of {kpi} by {int(amount_changed)*10}%, {caused_by} should be improved by {step_size*10}%.\n'
@app.route("/dss/get_specific_recommendation/", methods=["GET"])
@app.route("/dss/get_specific_recommendation", methods=["GET"])
def get_recommendation():
recommendation_for_plus_one_analysis = ""
recommendation_for_plus_two_analysis = ""
......@@ -237,16 +256,52 @@ def preprocess_kpi_jsons(baseline_id, compare_id):
Read the kpi data, calculate relative to baseline, discretize for dexi.
"""
baseline_json = read_kpi_jsons([baseline_id])[0]
structure_json = json.loads(json.dumps(baseline_json))
compare_json = read_kpi_jsons([compare_id])[0]
app.logger.debug("read kpis")
app.logger.debug("baseline kpis: \n%s", json.dumps(baseline_json, indent=2))
app.logger.debug(" compare kpis: \n%s", json.dumps(compare_json, indent=2))
simulation_ids = get_simulation_ids([baseline_json, compare_json])
baseline_json, compare_json = calculate_relative_kpi(baseline_json, compare_json)
app.logger.debug("relativitzed")
app.logger.debug("baseline kpis: \n%s", json.dumps(baseline_json, indent=2))
app.logger.debug(" compare kpis: \n%s", json.dumps(compare_json, indent=2))
baseline_json, compare_json = discretize_relative_kpi(baseline_json, compare_json)
app.logger.debug("discretized")
app.logger.debug("baseline kpis: \n%s", json.dumps(baseline_json, indent=2))
app.logger.debug(" compare kpis: \n%s", json.dumps(compare_json, indent=2))
return baseline_json, compare_json, simulation_ids
def postprocess_dexi_result(evaluated, structure, city_id=None):
    """
    Remap selected aggregate DEXi output values for presentation.

    For the aggregate attributes of the given city (see ``to_change``),
    the raw DEXi values "2" and "3" are remapped to "3" and "5"
    respectively; every other key is copied through unchanged.

    :param evaluated: dict of shape {simulation_name: {attribute: value}};
        the single top-level key is the simulation name.
    :param structure: baseline KPI json; only logged here (kept for
        interface compatibility with existing callers).
    :param city_id: key into ``to_change`` ("amsterdam", "bilbao",
        "helsinki", "messina"). When omitted, falls back to a
        module-level ``city_id`` for backward compatibility — the
        original code read that name implicitly from the enclosing
        scope, which raises NameError when no such global exists.
        TODO(review): confirm where ``city_id`` is meant to come from.
    :return: {simulation_name: remapped_attributes}
    """
    to_change = {
        "amsterdam": ["Mobility Policy Quality", "Local", "Local traffic", "Local bike infrastructure", "City-wide", "City-wide traffic", "City-wide bike infrastructure"],
        "bilbao": ["Mobility Policy Quality", "Local", "Local Pollution", "Local Traffic", "City-wide", "City-wide Pollution", "City-wide Traffic"],
        "helsinki": ["Mobility Policy Quality", "Local", "Local Pollution", "City-wide", "City-wide Pollution"],
        "messina": []
    }
    if city_id is None:
        # BUG(review): the original body referenced an undefined `city_id`;
        # fall back to a module global of that name if one exists so the
        # previous (implicit-global) behavior is preserved.
        city_id = globals().get("city_id")
    app.logger.debug("evaluated: %s", evaluated)
    app.logger.debug("structure: %s", structure)
    result = {}
    # the top-level key is always the simulation name
    top_level_key = next(iter(evaluated))
    evaluated = evaluated[top_level_key]
    for key, value in evaluated.items():
        app.logger.debug("key in evaluated: '%s': %s, %s, %s", key, value, type(value), key in evaluated)
        if key in to_change[city_id]:
            # DEXi emits these values as strings here; bump the selected
            # aggregates up one display band.
            if value == "2":
                value = "3"
            elif value == "3":
                value = "5"
        result[key] = value
        app.logger.debug("adds to result: %s", result[key])
    return {top_level_key: result}
def read_kpi_jsons(sim_ids):
"""
Returns json object of kpi files for all simulations provided by ids.
......@@ -284,14 +339,14 @@ def calculate_relative_kpi(baseline_json: Dict, compare_json: Dict):
else:
try:
compare_result[b_k] = (
0.0
1.0
if compare_json[b_k] == 0.0
else float(b_v) / float(compare_json[b_k])
)
except RuntimeError as e:
app.logger.error("Error occured during relativizing KPIs.\n%s", e)
return
baseline_result[b_k] = 1
baseline_result[b_k] = 1.0
return baseline_result, compare_result
......@@ -307,7 +362,7 @@ def plus_one_kpi_change_generator(json_file: Dict, step_size, analysis_limit):
was_it_increased_flag = False
if k == 'simulation_id':
continue
if isinstance(v, dict):
if isinstance(v, dict) and len(v) != 0:
json_file[k] = plus_one_kpi_change_generator(v, step_size, analysis_limit)
for i in json_file[k]:
json_file[k] = i[0]
......@@ -440,56 +495,56 @@ def create_dexi_input_amsterdam(json_inputs, compare_sim_name):
+ [
json_inputs[i]["amsterdam"]["local"]
.get("traffic", {})
.get("bikeIntensity", 2)
.get("bikeIntensity", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local bike congestion"]
+ [
json_inputs[i]["amsterdam"]["local"]
.get("traffic", {})
.get("bikeCongestion", 2)
.get("bikeCongestion", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local bike safety"]
+ [
json_inputs[i]["amsterdam"]["local"]
.get("bikeInfrastructure", {})
.get("bikeSafety", 2)
.get("bikeSafety", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local bikeability"]
+ [
json_inputs[i]["amsterdam"]["local"]
.get("bikeInfrastructure", {})
.get("bikeability", 2)
.get("bikeability", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide bike intensity"]
+ [
json_inputs[i]["amsterdam"]["cityWide"]
.get("traffic", {})
.get("bikeIntensity", 2)
.get("bikeIntensity", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide bike congestion"]
+ [
json_inputs[i]["amsterdam"]["cityWide"]
.get("traffic", {})
.get("bikeCongestion", 2)
.get("bikeCongestion", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide bike safety"]
+ [
json_inputs[i]["amsterdam"]["cityWide"]
.get("bikeInfrastructure", {})
.get("bikeSafety", 2)
.get("bikeSafety", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide bikeability"]
+ [
json_inputs[i]["amsterdam"]["cityWide"]
.get("bikeInfrastructure", {})
.get("bikeability", 2)
.get("bikeability", 3)
for i, _ in enumerate(compare_sim_name)
],
]
......@@ -506,7 +561,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
json_inputs[i]["bilbao"]
.get("local", {})
.get("pollution", {})
.get("NOx", 2)
.get("NOx", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local PM10"]
......@@ -514,7 +569,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
json_inputs[i]["bilbao"]
.get("local", {})
.get("pollution", {})
.get("PM")
.get("PM", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local CO2"]
......@@ -522,7 +577,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
json_inputs[i]["bilbao"]
.get("local", {})
.get("pollution", {})
.get("CO2_TOTAL", 2)
.get("CO2_TOTAL", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local Acoustic pollution"]
......@@ -530,7 +585,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
json_inputs[i]["bilbao"]
.get("local", {})
.get("pollution", {})
.get("accousticPollution", 2)
.get("accousticPollution", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local Pedestrian travel time"]
......@@ -538,7 +593,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
json_inputs[i]["bilbao"]
.get("local")
.get("traffic", {})
.get("pedestrianTravelTime", 2)
.get("pedestrianTravelTime", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local Daily internal bike travels"]
......@@ -546,7 +601,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
json_inputs[i]["bilbao"]
.get("local")
.get("traffic", {})
.get("dailyInternalBikeTravels", 2)
.get("dailyInternalBikeTravels", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide NOx"]
......@@ -554,7 +609,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
json_inputs[i]["bilbao"]
.get("cityWide", {})
.get("pollution", {})
.get("NOx", 2)
.get("NOx", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide PM10"]
......@@ -562,7 +617,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
json_inputs[i]["bilbao"]
.get("cityWide", {})
.get("pollution", {})
.get("PM", 2)
.get("PM", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide CO2"]
......@@ -570,7 +625,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
json_inputs[i]["bilbao"]
.get("cityWide", {})
.get("pollution", {})
.get("CO2_TOTAL", 2)
.get("CO2_TOTAL", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide Acoustic pollution"]
......@@ -578,7 +633,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
json_inputs[i]["bilbao"]
.get("cityWide", {})
.get("pollution", {})
.get("pollution", 2)
.get("pollution", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide Pedestrian travel time"]
......@@ -586,7 +641,7 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
json_inputs[i]["bilbao"]
.get("cityWide", {})
.get("traffic", {})
.get("pedestrianTravelTime", 2)
.get("pedestrianTravelTime", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide Daily internal bike travels"]
......@@ -594,12 +649,12 @@ def create_dexi_input_bilbao(json_inputs, compare_sim_name):
json_inputs[i]["bilbao"]
.get("cityWide", {})
.get("traffic", {})
.get("dailyInternalBikeTravels", 2)
.get("dailyInternalBikeTravels", 3)
for i, _ in enumerate(compare_sim_name)
],
["Entry capacity to center"]
+ [
json_inputs[i]["bilbao"].get("entryCapacityToCenter", 2)
json_inputs[i]["bilbao"].get("entryCapacityToCenter", 3)
for i, _ in enumerate(compare_sim_name)
],
]
......@@ -615,61 +670,61 @@ def create_dexi_input_helsinki(json_inputs, compare_sim_name):
[""] + compare_sim_name,
["Local NOx"]
+ [
json_inputs[i]["helsinki"]["local"].get("pollution", {}).get("NOx", 2)
json_inputs[i]["helsinki"]["local"].get("pollution", {}).get("NOx", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local PM10"]
+ [
json_inputs[i]["helsinki"]["local"].get("pollution", {}).get("PM", 2)
json_inputs[i]["helsinki"]["local"].get("pollution", {}).get("PM", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local CO2"]
+ [
json_inputs[i]["helsinki"]["local"].get("pollution", {}).get("CO2_TOTAL", 2)
json_inputs[i]["helsinki"]["local"].get("pollution", {}).get("CO2_TOTAL", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local Acoustic pollution"]
+ [
json_inputs[i]["helsinki"]["local"]
.get("pollution", {})
.get("acousticPollution", 2)
.get("acousticPollution", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local congestions and bottlenecks"]
+ [
json_inputs[i]["helsinki"]["local"].get("congestionsAndBottlenecks", 2)
json_inputs[i]["helsinki"]["local"].get("congestionsAndBottlenecks", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide NOx"]
+ [
json_inputs[i]["helsinki"]["cityWide"].get("pollution", {}).get("NOx", 2)
json_inputs[i]["helsinki"]["cityWide"].get("pollution", {}).get("NOx", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide PM10"]
+ [
json_inputs[i]["helsinki"]["cityWide"].get("pollution", {}).get("PM", 2)
json_inputs[i]["helsinki"]["cityWide"].get("pollution", {}).get("PM", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide CO2"]
+ [
json_inputs[i]["helsinki"]["cityWide"].get("pollution", {}).get("CO2_TOTAL", 2)
json_inputs[i]["helsinki"]["cityWide"].get("pollution", {}).get("CO2_TOTAL", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide Acoustic pollution"]
+ [
json_inputs[i]["helsinki"]["cityWide"]
.get("pollution", {})
.get("acousticPollution", 2)
.get("acousticPollution", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide congestions and bottlenecks"]
+ [
json_inputs[i]["helsinki"]["cityWide"].get("congestionsAndBottlenecks", 2)
json_inputs[i]["helsinki"]["cityWide"].get("congestionsAndBottlenecks", 3)
for i, _ in enumerate(compare_sim_name)
],
["Harbour area traffic flow"]
+ [
json_inputs[i].get("harbourAreaTrafficFlow", 2)
json_inputs[i].get("harbourAreaTrafficFlow", 3)
for i, _ in enumerate(compare_sim_name)
],
]
......@@ -685,84 +740,84 @@ def create_dexi_input_messina(json_inputs, compare_sim_name):
+ [
json_inputs[i]["messina"]["local"]
.get("publicTransport", {})
.get("publicTransportUse", 2)
.get("publicTransportUse", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local average speed of public transport"]
+ [
json_inputs[i]["messina"]["local"]
.get("publicTransport", {})
.get("averageSpeedOfPublicTransport", 2)
.get("averageSpeedOfPublicTransport", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local number of bike trips"]
+ [
json_inputs[i]["messina"]["local"]
.get("publicTransport", {})
.get("numberOfBikeTrips", 2)
.get("numberOfBikeTrips", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local share of public transport"]
+ [
json_inputs[i]["messina"]["local"]
.get("shareOfTrips", {})
.get("shareOfPublicTransport", 2)
.get("shareOfPublicTransport", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local share of car trips"]
+ [
json_inputs[i]["messina"]["local"]
.get("shareOfTrips", {})
.get("shareOfCarTrips", 2)
.get("shareOfCarTrips", 3)
for i, _ in enumerate(compare_sim_name)
],
["Local share of bicycles"]
+ [
json_inputs[i]["messina"]["local"]
.get("shareOfTrips", {})
.get("shareOfBicycles", 2)
.get("shareOfBicycles", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide public transport use"]
+ [
json_inputs[i]["messina"]["cityWide"]
.get("publicTransport", {})
.get("publicTransportUse", 2)
.get("publicTransportUse", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide average speed of public transport"]
+ [
json_inputs[i]["messina"]["cityWide"]
.get("publicTransport", {})
.get("averageSpeedOfPublicTransport", 2)
.get("averageSpeedOfPublicTransport", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide number of bike trips"]
+ [
json_inputs[i]["messina"]["cityWide"]
.get("publicTransport", {})
.get("numberOfBikeTrips", 2)
.get("numberOfBikeTrips", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide share of public transport"]
+ [
json_inputs[i]["messina"]["cityWide"]
.get("shareOfTrips", {})
.get("shareOfPublicTransport", 2)
.get("shareOfPublicTransport", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide share of car trips"]
+ [
json_inputs[i]["messina"]["cityWide"]
.get("shareOfTrips", {})
.get("shareOfCarTrips", 2)
.get("shareOfCarTrips", 3)
for i, _ in enumerate(compare_sim_name)
],
["City-wide share of bicycles"]
+ [
json_inputs[i]["messina"]["cityWide"]
.get("shareOfTrips", {})
.get("shareOfBicycles", 2)
.get("shareOfBicycles", 3)
for i, _ in enumerate(compare_sim_name)
],
]
......
......@@ -5,6 +5,7 @@ import os
import json
import threading
import logging
import time
from typing import Dict, List, Union
from pyproj import Transformer
......@@ -25,6 +26,29 @@ city = app.dotenv["URBANITE_CITY"]
data_dir = f"{app.dotenv['DATA_DIR']}/{app.dotenv['URBANITE_CITY']}"
# DEBUG
@app.route("/dss/geojson/test/<sim_id>")
def geojsontest(sim_id):
    """
    Debug endpoint: compute long pedestrian trips for one simulation and
    write them out as GeoJSON, logging how long each stage takes.

    :param sim_id: simulation identifier (string path segment).
    :return: the GeoJSON FeatureCollection produced by
        write_geojson_pedestrian_trips (which also writes it to disk).
    """
    app.logger.debug("starting test")
    # NOTE: removed unused local `kpi_name` from the original.
    app.logger.debug("starting load network and events")
    network, events = utils.get_network_and_events(sim_id)
    app.logger.debug("done")

    app.logger.debug("starting calculate pedestrian travel time")
    start_t = time.time()
    pedestrianTravelTime = events.pedestrian_travel_time()["detailed_long_trips"]
    delta_t = time.time() - start_t
    app.logger.debug("done in %s s", delta_t)

    app.logger.debug("starting write geojson pedestrian trips")
    start_t = time.time()
    pedestrianTravelTime = write_geojson_pedestrian_trips(
        pedestrianTravelTime, network, sim_id
    )
    delta_t = time.time() - start_t
    app.logger.info("written pedestrianTravelTime in %s s", delta_t)
    return pedestrianTravelTime
@app.route("/dss/geojson/<int:sim_id>/<kpi_name>")
def get_kpi_geojson(sim_id, kpi_name):
sim_dir = utils.get_sim_dir_names([sim_id])[0]
......@@ -59,6 +83,10 @@ def get_kpi_vis_info():
"buttonLabel": "Pedestrian Travel Time",
},
{"buttonValue": "dailyInternalTravels", "buttonLabel": "Internal Trips"},
{
"buttonValue": "noisePollution",
"buttonLabel": "Noise Pollution",
},
]
elif city == "amsterdam":
kpi_info = [
......@@ -71,10 +99,6 @@ def get_kpi_vis_info():
elif city == "helsinki":
kpi_info = [
{"buttonValue": "pollution", "buttonLabel": "Pollution"},
{
"buttonValue": "accousticPollution",
"buttonLabel": "Accoustic Pollution - WIP",
},
{
"buttonValue": "congestionsAndBottlenecks",
"buttonLabel": "Congestions and Bottlenecks",
......@@ -95,7 +119,7 @@ def get_kpi_vis_info():
"buttonValue": "averageSpeedOfPublicTransport",
"buttonLabel": "Avg Public Transport Speed",
},
{"buttonValue": "numberOfBikeTrips", "buttonLabel": "Number of Bike Trips"},
# {"buttonValue": "numberOfBikeTrips", "buttonLabel": "Number of Bike Trips"},
{
"buttonValue": "shareOfVehicles",
"buttonLabel": "Share of Trips per Mode",
......@@ -114,33 +138,36 @@ def get_kpi_data_and_write(network, events, sim_id):
"""
get_kpi_data_final_geojson is a list of links with kpis calculated
"""
app.logger.info("getting kpi data to write")
app.logger.info(f"getting kpi data to write, city {city}")
results = {}
emisson_object = kpis_module.get_emissions(sim_id, network)
rush_hour = utils.get_max_traffic_time(sim_id)
if network.city == "bilbao":
if emisson_object is not None:
pollution = emisson_object.total_emissions_by_link()
app.logger.info("Emission by link")
write_geojson(pollution, "pollution", network, sim_id)
write_geojson_links(pollution, "pollution", network, sim_id)
app.logger.info("written emission")
pedestrianTravelTime = events.pedestrian_travel_time()[0]
pedestrianTravelTime = events.pedestrian_travel_time()["detailed_long_trips"]
app.logger.info("Pedestrian travel time")
write_geojson(pedestrianTravelTime, "pedestrianTravelTime", network, sim_id)
write_geojson_pedestrian_trips(pedestrianTravelTime, network, sim_id)
app.logger.info("written pedestrianTravelTime")
dailyInternalTravels = events.vehicles_count_per_link()
app.logger.info("Vehicles count per link")
write_geojson(dailyInternalTravels, "dailyInternalTravels", network, sim_id)
write_geojson_links(
dailyInternalTravels, "dailyInternalTravels", network, sim_id
)
app.logger.info("written dailyInternalTravels")
elif network.city == "helsinki":
if emisson_object is not None:
pollution = emisson_object.total_emissions_by_link()
app.logger.info("total emissions by link")
write_geojson(pollution, "pollution", network, sim_id)
write_geojson_links(pollution, "pollution", network, sim_id)
app.logger.info("written emission")
# acousticPollution = None # TODO integrate with Inaki
......@@ -148,9 +175,11 @@ def get_kpi_data_and_write(network, events, sim_id):
# write_geojson(pollution, "pollution", network, sim_id)
# app.logger.info("written emission")
congestionsAndBottlenecks = events.get_congested_links()
congestionsAndBottlenecks = events.get_congested_links(rush_hour)[
"congested_links"
]
app.logger.info("congestions and bottlenecks")
write_geojson(
write_geojson_links(
congestionsAndBottlenecks, "congestionsAndBottlenecks", network, sim_id
)
app.logger.info("written congestionsAndBottlenecks")
......@@ -159,20 +188,21 @@ def get_kpi_data_and_write(network, events, sim_id):
app.logger.info("harbour area traffic flow - TODO")
elif network.city == "messina":
if emisson_object is not None:
pollution = emisson_object.total_emissions_by_link()
app.logger.info("Emission by link")
write_geojson(pollution, "pollution", network, sim_id)
write_geojson_links(pollution, "pollution", network, sim_id)
app.logger.info("written emission")
publicTransportUse = events.public_transport_use_geojson()
app.logger.info("public transport use")
write_geojson(publicTransportUse, "publicTransportUse", network, sim_id)
write_geojson_links(publicTransportUse, "publicTransportUse", network, sim_id)
app.logger.info("written public transport use")
averageSpeedOfPublicTransport = events.average_bus_speed_geojson() # TODO
averageSpeedOfPublicTransport = events.average_bus_speed_geojson()
app.logger.info("average speed of public transport")
write_geojson(
write_geojson_links(
averageSpeedOfPublicTransport,
"averageSpeedOfPublicTransport",
network,
......@@ -182,46 +212,48 @@ def get_kpi_data_and_write(network, events, sim_id):
numberOfBikeTrips = events.vehicles_count_per_link(only="bike")
app.logger.info("number of bike trips")
write_geojson(numberOfBikeTrips, "numberOfBikeTrips", network, sim_id)
write_geojson_links(numberOfBikeTrips, "numberOfBikeTrips", network, sim_id)
app.logger.info("written numberOfBikeTrips")
shareOfVehicles = vehicleCountToShareGeojson(events.vehicles_count_per_link())
# shareOfPublicTransport = events.share_public_transport()
app.logger.info("share of vehicles")
write_geojson(shareOfVehicles, "shareOfVehicles", network, sim_id)
write_geojson_links(shareOfVehicles, "shareOfVehicles", network, sim_id)
app.logger.info("written shareOfVehicles")
dailyInternalTravels = events.vehicles_count_per_link()
app.logger.info("daily internal travels")
write_geojson(dailyInternalTravels, "dailyInternalTravels", network, sim_id)
write_geojson_links(
dailyInternalTravels, "dailyInternalTravels", network, sim_id
)
app.logger.info("written dailyInternalTravels")
elif network.city == "amsterdam":
if emisson_object is not None:
pollution = emisson_object.total_emissions_by_link()
app.logger.info("got emissions by link")
write_geojson(pollution, "pollution", network, sim_id)
write_geojson_links(pollution, "pollution", network, sim_id)
app.logger.info("written pollution")
bikeability = events.bikeability_index()
app.logger.info("bikeability")
write_geojson(bikeability, "bikeability", network, sim_id)
write_geojson_links(bikeability, "bikeability", network, sim_id)
app.logger.info("written bikeability")
bike_safety = events.bike_safety_index()
app.logger.info("bike safety")
write_geojson(bike_safety, "bikeSafety", network, sim_id)
write_geojson_links(bike_safety, "bikeSafety", network, sim_id)
app.logger.info("written bikeSafety")
bike_intensity = events.vehicles_count_per_link()
app.logger.info("bikeIntensity")
write_geojson(bike_intensity, "bikeIntensity", network, sim_id)
write_geojson_links(bike_intensity, "bikeIntensity", network, sim_id)
app.logger.info("written bikeIntensity")
bike_congestion = events.get_congested_links(vehicle_mode="bicycle")
bike_congestion = events.get_congested_links(rush_hour, vehicle_mode="bicycle")[
"congested_links"
]
app.logger.info("bikeCongestion")
write_geojson(bike_congestion, "bikeCongestion", network, sim_id)
write_geojson_links(bike_congestion, "bikeCongestion", network, sim_id)
app.logger.info("written bikeCongestion")
app.logger.info("done")
......@@ -249,19 +281,62 @@ def vehicleCountToShareGeojson(data_per_link):
return results
def write_geojson(kpi_data, kpi_name, network, sim_id):
def _link_midpoint_lonlat(network, link_id, crs_transformer):
    """
    Return the (lon, lat) midpoint of a network link.

    The midpoint of the link's two end nodes is computed in the network's
    native CRS, then reprojected to WGS84. ``reversed`` swaps the
    transform output into GeoJSON (lon, lat) order — this assumes the
    transformer yields (lat, lon); TODO(review) confirm axis order holds
    for every city's CRS.
    """
    link = network.get_link(link_id)
    nodes = [network.get_node(link["from"]), network.get_node(link["to"])]
    midpoint = (
        (float(nodes[0]["x"]) + float(nodes[1]["x"])) / 2.0,
        (float(nodes[0]["y"]) + float(nodes[1]["y"])) / 2.0,
    )
    return tuple(reversed(crs_transformer.transform(*midpoint)))


def write_geojson_pedestrian_trips(kpi_data, network, sim_id):
    """
    Write long pedestrian trips as a GeoJSON FeatureCollection.

    Each qualifying trip becomes a LineString from the midpoint of its
    start link to the midpoint of its end link. Trips with
    average_time < 3600 (presumably seconds, i.e. under one hour) are
    skipped. The original duplicated the midpoint computation for the
    start and end links verbatim; it is now factored into
    ``_link_midpoint_lonlat``.

    kpi_data should be a list [{"start_link": _, "end_link": _, "average_time": _ } ]

    :return: the GeoJSON dict (also written to the file named by
        get_filename(sim_id, "pedestrianTravelTime")).
    """
    data = {"type": "FeatureCollection", "features": []}
    crs_transformer = Transformer.from_crs(network.crs_epsg, "epsg:4326")
    for element in kpi_data:
        # keep only long trips (>= 1 hour)
        if element["average_time"] < 3600.0:
            continue
        s_point = _link_midpoint_lonlat(network, element["start_link"], crs_transformer)
        e_point = _link_midpoint_lonlat(network, element["end_link"], crs_transformer)
        data["features"].append(
            {
                "type": "Feature",
                # duration displayed in minutes
                "properties": {"long pedestrian trip": element["average_time"] / (60)},
                "geometry": {"type": "LineString", "coordinates": [s_point, e_point]},
            }
        )
    filename = get_filename(sim_id, "pedestrianTravelTime")
    with open(filename, "w") as fp:
        json.dump(data, fp, indent=2)
    return data
def write_geojson_links(kpi_data, kpi_name, network, sim_id):
"""
Write any kpi_data object to a file.
Write a per link kpi_data object to a file.
kpi_data should be a dict { "link_id": {kpi1, kpi2, ...}, ...}
"""
logger.debug("KPI DATA to be written: \n%s", kpi_data)
# logger.debug("KPI DATA to be written: \n%s", kpi_data)
data = {"type": "FeatureCollection", "features": []}
crs_transformer = Transformer.from_crs(network.crs_epsg, "epsg:4326")
logger.warning("network city: %s, network epsg: %s", network.city, network.crs_epsg)
for link_id, link_data in kpi_data.items():
if link_id.startswith("pt"):
continue
if len(link_data) < 1:
# if isinstance(link_data, float):
# link_data = {kpi_name: link_data}
elif len(link_data) < 1:
continue
link = network.get_link(link_id)
......@@ -289,7 +364,7 @@ def write_geojson(kpi_data, kpi_name, network, sim_id):
filename = get_filename(sim_id, kpi_name)
with open(filename, "w") as fp:
json.dump(data, fp)
json.dump(data, fp, indent=2)
def get_filename(sim_id, kpi_name):
......
......@@ -5,16 +5,10 @@ import time
import json
import os.path
import logging
from typing import Union, List
import flask
from jsonschema import validate
import jsonpath_ng
from flask import Blueprint
from flask import current_app as app
from app.netedit.network import Network
from app.netedit.events import TimeSlot, VehicleEvents
from app.netedit.events import EmissionEvents
from app import utils
......@@ -30,15 +24,15 @@ assets_dir = app.dotenv["ASSETS_DIR"]
logger = logging.getLogger(__name__)
# DEBUG
# @app.route("/dss/test/<int:simulation_id>", methods=["GET"])
# def test_kpi_congestions(simulation_id):
# rush_hour = utils.get_max_traffic_time(simulation_id)
# network, events = utils.get_network_and_events(simulation_id)
# # result = events.get_congested_links(None, None, None, rush_hour)
# ll = network.nearby_links_range("1364605710", 160)
# app.logger.debug(ll)
# result = events.average_bus_speed(local_links=ll)
# return {"avg_bus_speed": result}
@app.route("/dss/test/<int:simulation_id>", methods=["GET"])
def test_kpi_congestions(simulation_id):
    """Ad-hoc debug route: load a simulation and return its bikeability index."""
    app.logger.info("test called")
    # events carries the simulation's event data; the network object is
    # loaded alongside it but is not needed for this particular KPI.
    _network, events = utils.get_network_and_events(simulation_id)
    bikeability = events.bikeability_index()
    return {"res": bikeability}
@app.route("/dss/kpis/<city_id>/<int:simulation_id>", methods=["GET"])
def get_kpis_for_city(city_id, simulation_id):
......@@ -61,7 +55,6 @@ def get_kpis_for_city(city_id, simulation_id):
if city_id == "bilbao":
emission_object = get_emissions(simulation_id, network)
rush_hour_time = utils.get_max_traffic_time(simulation_id) # rush hour time
results = prepare_results_for_bilbao(
simulation_id, network, events, emission_object, rush_hour_time
)
......@@ -265,7 +258,7 @@ def prepare_results_for_bilbao(
"""
Creates the kpis JSON object for Bilbao.
"""
app.logger.info("Preparing results for bilbao %s", timeslot)
app.logger.info("prepare_results_for_bilbao with timeslot %s", timeslot)
results = {"simulation_id": simulation_id}
results["bilbao"] = {
"cityWide": {"traffic": {}, "pollution": {}},
......@@ -279,17 +272,15 @@ def prepare_results_for_bilbao(
results["bilbao"]["cityWide"]["pollution"] = kpi
results["bilbao"]["cityWide"]["traffic"] = {}
kpi = events.pedestrian_travel_time(timeslot)
results["bilbao"]["cityWide"]["traffic"]["pedestrianTravelTime"] = kpi[0]
kpi = events.pedestrian_travel_time(timeslot=timeslot)
results["bilbao"]["cityWide"]["traffic"]["pedestrianTravelTime"] = kpi["average_trip_duration"]
# app.logger.info(kpi[0])
kpi = events.vehicles_count(timeslot)
results["bilbao"]["cityWide"]["traffic"]["dailyInternalBikeTravels"] = kpi[
"bike_count"
]
# local
# moyua center position in EPSG 32630
results["bilbao"]["cityWide"]["traffic"]["dailyInternalBikeTravels"] = kpi["bike_count"]
#
# # local
# # moyua center position in EPSG 32630
moyua_point = {"x": 505277.192210581, "y": 4790023.046986237}
local_links = network.nearby_links_range(moyua_point, delta=200)
......@@ -298,43 +289,20 @@ def prepare_results_for_bilbao(
results["bilbao"]["local"]["pollution"] = kpi
results["bilbao"]["local"]["traffic"] = {}
kpi = events.pedestrian_travel_time(local_links, timeslot)
results["bilbao"]["local"]["traffic"]["pedestrianTravelTime"] = kpi[0]
# kpi = events.pedestrian_travel_time(local_links, timeslot)
# makes no sense, can't have long trips in a small area
results["bilbao"]["local"]["traffic"]["pedestrianTravelTime"] = 0
kpi = events.vehicles_count(local_links, timeslot)
results["bilbao"]["local"]["traffic"]["dailyInternalBikeTravels"] = kpi[
"bike_count"
]
# other
moyua_square_links = [
"22691",
"22692",
"51616",
"51617",
"52446",
"52447",
"45116",
"45117",
"45118",
"37953",
"45140",
"33748",
"33749",
"2484",
"2485",
"26087",
"47250",
"26024",
"43904",
"20252",
]
# If timeslot is not given, we are only interested in rush_hour_time
if not timeslot:
timeslot = TimeSlot(delta=3600, slot_index=rush_hour_time)
_, total_capacity = events.capacity_to_moyua(moyua_square_links, timeslot)
total_capacity = events.capacity_to_moyua(local_links, timeslot)
results["bilbao"]["entryCapacityToCenter"] = total_capacity
return results
......@@ -360,19 +328,19 @@ def prepare_results_for_amsterdam(simulation_id, network, events, timeslot=None)
if kpi == 0:
kpi = 1 # TODO this needs to be fixes, as above
results["amsterdam"]["cityWide"]["traffic"]["bikeIntensity"] = kpi
app.logger.warn("bike intensity: %s", kpi)
# app.logger.warn("bike intensity: %s", kpi)
kpi = events.get_congested_links(rush_hour, vehicle_mode="bicycle")["congestion_length"]
results["amsterdam"]["cityWide"]["traffic"]["bikeCongestion"] = kpi
app.logger.warn("bike congestion: %s", kpi)
# app.logger.warn("bike congestion: %s", kpi)
kpi = events.bike_safety_aggregate(events.bike_safety_index(timeslot))
results["amsterdam"]["cityWide"]["bikeInfrastructure"]["bikeSafety"] = kpi
app.logger.warn("bike infrastructure: %s", kpi)
# app.logger.warn("bike infrastructure: %s", kpi)
kpi = events.bikeability_aggregate(events.bikeability_index())
results["amsterdam"]["cityWide"]["bikeInfrastructure"]["bikeability"] = kpi
app.logger.warn("bikeability: %s", kpi)
# app.logger.warn("bikeability: %s", kpi)
# local
app.logger.info("local kpis...")
......@@ -383,19 +351,19 @@ def prepare_results_for_amsterdam(simulation_id, network, events, timeslot=None)
kpi = events.vehicles_count(local_links=local_links, timeslot=timeslot)["bike_count"]
results["amsterdam"]["local"]["traffic"]["bikeIntensity"] = kpi
app.logger.warn("bikeintensity: %s", kpi)
# app.logger.warn("bikeintensity: %s", kpi)
kpi = events.get_congested_links(rush_hour, vehicle_mode="bicycle", local_links=local_links)["congestion_length"]
results["amsterdam"]["local"]["traffic"]["bikeCongestion"] = kpi
app.logger.info("bike congestion: %s", kpi)
# app.logger.info("bike congestion: %s", kpi)
kpi = events.bike_safety_aggregate(events.bike_safety_index(timeslot))
results["amsterdam"]["local"]["bikeInfrastructure"]["bikeSafety"] = kpi
app.logger.info("bike safety: %s", kpi)
# app.logger.info("bike safety: %s", kpi)
kpi = events.bikeability_aggregate(events.bikeability_index())
results["amsterdam"]["local"]["bikeInfrastructure"]["bikeability"] = kpi
app.logger.info("bikeability: %s", kpi)
# app.logger.info("bikeability: %s", kpi)
return results
......@@ -410,11 +378,11 @@ def prepare_results_for_helsinki(
results["simulation_id"] = simulation_id
results["helsinki"] = {
"cityWide": {
"pollution": {"acousticPollution": -1},
"pollution": {"acousticPollution": 3},
"congestionsAndBottlenecks": -1,
},
"local": {
"pollution": {"acousticPollution": -1},
"pollution": {"acousticPollution": 3},
"congestionsAndBottlenecks": -1,
},
"harbourAreaTrafficFlow": -1,
......@@ -425,20 +393,16 @@ def prepare_results_for_helsinki(
kpi = emission_object.emissions_total(timeslot)
results["helsinki"]["cityWide"]["pollution"]["emissions"] = kpi
# kpi = events.get_congested_links() # TODO: fix, this returns a big list of some links, instead should be a number?
results["helsinki"]["cityWide"][
"congestionsAndBottlenecks"
] = events.get_congested_links(rush_hour_time)["congestion_length"]
# local
# Jatkasaari center position in EPSG 32634
jatkasaari_point = {"x": 717321.1395281242, "y": 6675402.725594756}
local_links = network.nearby_links_range(jatkasaari_point, delta=500)
network, events = utils.get_network_and_events(simulation_id, local=local_links)
local_links = network.nearby_links_range("4449417566", delta=500)
if emission_object != None:
kpi = emission_object.emissions_total_links_sum(local_links, timeslot)
app.logger.info(f"EMISSIONS: {kpi}")
# app.logger.info(f"EMISSIONS: {kpi}")
results["helsinki"]["local"]["pollution"] = kpi
kpi = emission_object.total_emissions_by_link(local_links, timeslot)
......@@ -456,6 +420,7 @@ def prepare_results_for_messina(simulation_id, network, events, timeslot=None):
"""
Prepares the kpis JSON object for Messina.
"""
app.logger.info("prepare_results_for_messina with timeslot %s", timeslot)
results = {}
results["simulation_id"] = simulation_id
results["messina"] = {"cityWide": {}, "local": {}}
......@@ -486,10 +451,7 @@ def prepare_results_for_messina(simulation_id, network, events, timeslot=None):
# Local
results["messina"]["local"]["publicTransport"] = {}
# TODO: check again where these changes are; convert from/to the appropriate EPSG
point_near_changes = {"x": 547873.917444, "y": 4224853.800459}
local_links = network.nearby_links_range(point_near_changes, delta=500)
del vehicles_count
local_links = network.nearby_links_range("1691828242", delta=1800)
vehicles_count = events.vehicles_count(local_links=local_links, timeslot=timeslot)
kpi = events.average_bus_speed(local_links=local_links, timeslot=timeslot)
results["messina"]["local"]["publicTransport"][
......
......@@ -13,6 +13,7 @@ from xml.etree import ElementTree as ET
import logging
from flask import current_app as app
import pandas as pd
import numpy as np
TimeSlot = namedtuple("TimeSlot", "delta slot_index")
......@@ -150,6 +151,7 @@ class VehicleEvents(Events):
Events.__init__(self, path, network)
self.trips_number = self.trips_amount()
# TODO this doesn't work for Amsterdam - is always 10 or 0
def bike_safety_index(self, local_links=None, timeslot=None) -> Dict:
"""
Calculates the bike safety KPI, based on
......@@ -175,21 +177,28 @@ class VehicleEvents(Events):
"bikeability_safety": 0,
"capacity": capacity,
}
ev_vehicle = event["vehicle"].split("_")
if len(ev_vehicle) > 1:
if ev_vehicle[1] == "bicycle":
links[event["link"]]["bike_count"] += 1
elif ev_vehicle[1] == "bus":
links[event["link"]]["bus_count"] += 1
elif event["vehicle"].isnumeric() or event["vehicle"].find(".1") >= 0:
ev_vehicle = event["vehicle"]
if "bicycle" in ev_vehicle:
links[event["link"]]["bike_count"] += 1
elif "bus" in ev_vehicle:
links[event["link"]]["bus_count"] += 1
else:
links[event["link"]]["car_count"] += 1
for link_id, link_data in links.items():
if links.get(link_id, False) and link_data["bike_count"] > 0:
links[link_id]["bikeability_safety"] = 10 * (
1 - link_data["car_count"] / link_data["capacity"]
)
b_c = link_data["bike_count"]
m_c = link_data["car_count"] + link_data["bus_count"]
suma = b_c + m_c
links[link_id]["bikeability_safety"] = (math.sin(b_c/suma) + math.sin(-(m_c/suma)))*5 +5
links[link_id].pop("bike_count", None)
links[link_id].pop("bus_count", None)
links[link_id].pop("car_count", None)
links[link_id].pop("capacity", None)
return links
# TODO fix after fixing bike_safety_index above
def bike_safety_aggregate(self, safety_score: List[Dict]):
safety_avg = 0
n_scores = 0
......@@ -212,8 +221,12 @@ class VehicleEvents(Events):
if local_links is not None:
filter_expr = lambda link: link["id"] in local_links
score = {}
n_links_with_osm_way = 0
for link in filter(filter_expr, self.network.links):
if "osm:way:highway" in link:
n_links_with_osm_way += 1
# logger.info("link iwth osm:way:h %s", link["osm:way:highway"])
if link["osm:way:highway"] == "steps":
score[link["id"]] = {"infra": 0, "speed": 0}
elif link["osm:way:highway"] == "motorway":
......@@ -247,14 +260,14 @@ class VehicleEvents(Events):
elif link["osm:way:highway"] == "cycleway":
score[link["id"]] = {"infra": 10, "speed": 0}
# speed limit 30 km/h - 8.33 m/s - 10 points
if float(link["freespeed"]) < 8.4:
# speed limit 30 km/h - 8.7 m/s - 10 points
if float(link["freespeed"]) < 9:
if not score.get(link["id"], False):
score[link["id"]] = {"infra": 0, "speed": 10}
else:
score[link["id"]]["speed"] = 10
# speed limit 50 km/h - 13.88 m/s - 7 points
elif float(link["freespeed"]) < 13.9:
elif float(link["freespeed"]) < 14:
if not score.get(link["id"], False):
score[link["id"]] = {"infra": 0, "speed": 7}
else:
......@@ -265,7 +278,7 @@ class VehicleEvents(Events):
score[link["id"]] = {"infra": 0, "speed": 0}
else:
score[link["id"]]["speed"] = 0
# logger.info("bikeability index found %s links with osm:way: property", n_links_with_osm_way)
return score
@staticmethod
......@@ -279,7 +292,8 @@ class VehicleEvents(Events):
infra_avg /= len(scores)
speed_avg /= len(scores)
except ZeroDivisionError:
pass
logger.warn("No infra or speed scores - missing data in network?")
return (len(scores) / (len(scores) + len(scores))) * infra_avg + (
len(scores) / (len(scores) + len(scores))
) * speed_avg
......@@ -289,12 +303,9 @@ class VehicleEvents(Events):
Calculates the daily vehicle counts for each link.
"""
if local_links is None:
def filter_exp(event):
return event["type"] == "entered link"
else:
def filter_exp(event):
return event["type"] == "entered link" and event["link"] in local_links
......@@ -308,17 +319,12 @@ class VehicleEvents(Events):
"car_count": 0,
"bike_count": 0,
}
ev_vehicle = event["vehicle"].split("_")
if len(ev_vehicle) > 1:
if ev_vehicle[1] == "bicycle" and check_only("bike"):
ev_vehicle = event["vehicle"]
if "bicycle" in ev_vehicle:
links[event["link"]]["bike_count"] += 1
elif ev_vehicle[1] == "bus" and check_only("bus"):
elif "bus" in ev_vehicle and not check_only == "bike":
links[event["link"]]["bus_count"] += 1
elif (
event["vehicle"].isnumeric()
or event["vehicle"].find(".1") >= 0
and check_only("car")
):
elif not check_only == "bike":
links[event["link"]]["car_count"] += 1
return links
......@@ -327,62 +333,36 @@ class VehicleEvents(Events):
Calculates daily vehicle counts in the network.
"""
if local_links is None:
def filter_exp(event):
return event["type"] == "entered link"
else:
def filter_exp(event):
return event["type"] == "entered link" and event["link"] in local_links
bicycles = set()
cars = set()
busses = set()
filtered_events = self.timeslot_filter_events(timeslot)
for event in filter(filter_exp, filtered_events):
ev_vehicle = event["vehicle"].split("_")
if len(ev_vehicle) > 1:
if ev_vehicle[1] == "bicycle":
bicycles.add(event["vehicle"])
elif ev_vehicle[1] == "bus":
busses.add(event["vehicle"])
elif event["vehicle"].isnumeric() or event["vehicle"].find(".1") >= 0:
cars.add(event["vehicle"])
links = {
"bus_count": len(busses),
"car_count": len(cars),
"bike_count": len(bicycles),
}
return links
# TODO moyua links
def capacity_to_moyua(self, moyua_square_links=None, timeslot=None):
if moyua_square_links is None:
moyua_square_links = [
"22691",
"22692",
"51616",
"51617",
"52446",
"52447",
"45116",
"45117",
"45118",
"37953",
"45140",
"33748",
"33749",
"2484",
"2485",
"26087",
"47250",
"26024",
"43904",
"20252",
]
ev_vehicle = event["vehicle"]
if "bicycle" in ev_vehicle: bicycles.add(event["vehicle"])
elif "bus" in ev_vehicle: busses.add(event["vehicle"])
else: cars.add(event["vehicle"])
logger.error(f"BUSSES: {len(busses)}")
logger.error(f"BIKES: {len(bicycles)}")
logger.error(f"CARS: {len(cars)}")
results = {}
if len(busses) > 0: results["bus_count"] = len(busses)
if len(cars) > 0: results["car_count"] = len(cars)
if len(bicycles) > 0: results["bike_count"] = len(bicycles)
return results
def capacity_to_moyua(self, moyua_square_links, timeslot=None):
links = {}
free_capacity = {}
filtered_events = self.timeslot_filter_events(timeslot)
......@@ -418,19 +398,18 @@ class VehicleEvents(Events):
# links maps each link id to a dict of per-link data (JSON-like structure)
for link, data in links.items():
free_capacity[link] = float(data["capacity"]) * 0.1 - (
float(data["bus_count"]) * 3
+ float(data["car_count"]) * 1
+ float(data["bike_count"]) * 0.25
free_capacity[link] = float(data["capacity"]) - (
float(data["bus_count"])
+ float(data["car_count"])
+ float(data["bike_count"])
)
total_capacity = 0
for link in free_capacity:
total_capacity += free_capacity[link]
return free_capacity, total_capacity
return total_capacity
# TODO test
def average_bus_speed(self, local_links=None, timeslot=None) -> float:
vehicles = {}
if local_links is None:
......@@ -449,10 +428,6 @@ class VehicleEvents(Events):
return event["facility"][id_start_position + 1 :] in local_links
return False
# TODO: here event["link"] gives keyerror. event is a list, most probably even other places that use this filter_expr with event["link"] are wrong
# search CTRL F, for places where they have event["link"] and use their code on how to get the event link ID, because local_links returns a list of ids
# TRY: self.network.get_link(event["link"]) and see what it does
filtered_events = self.timeslot_filter_events(timeslot)
for event in filter(filter_exp, filtered_events):
veh_id = event["vehicle"]
......@@ -518,70 +493,107 @@ class VehicleEvents(Events):
def pedestrian_travel_time(self, local_links: List[str] = None, timeslot=None):
if local_links is None:
def filter_exp(_event):
return _event["type"] == "departure" or _event["type"] == "arrival"
return (
(_event["type"] == "departure" or _event["type"] == "arrival") and
_event["legMode"] == "walk"
)
else:
def filter_exp(_event):
return (
_event["type"] == "departure" or _event["type"] == "arrival"
) and _event["link"] in local_links
persons = {}
pedestrianTripsByLinks = {}
(_event["type"] == "departure" or _event["type"] == "arrival") and
_event["legMode"] == "walk" and
_event["link"] in local_links)
filtered_events = self.timeslot_filter_events(timeslot)
trips_per_person = {}
for event in filter(filter_exp, filtered_events):
if event["type"] == "departure" and event["legMode"] == "walk":
if not event["person"] in persons:
persons[event["person"]] = []
persons[event["person"]].append(
if event["type"] == "departure":
if not event["person"] in trips_per_person:
trips_per_person[event["person"]] = []
trips_per_person[event["person"]].append(
{
"link_id": event["link"],
"begin time": event["time"],
"end time": 0,
"start_link_id": event["link"],
"begin_time": float(event["time"]),
"end_time": 0,
"end_link_id": -1
}
)
elif event["type"] == "arrival" and event["legMode"] == "walk":
elif event["type"] == "arrival":
if (
event["person"] in persons.keys()
and persons.get(event["person"])[-1]["end time"] == 0
event["person"] in trips_per_person.keys()
and trips_per_person.get(event["person"])[-1]["end_time"] == 0
):
persons[event["person"]][-1]["end time"] = event["time"]
pedestrian_time = {
"time": 0,
"count": 0,
"average": 0,
}
for _, person_trips in persons.items():
for trip in person_trips:
if trip["end time"] != 0:
# for calculating average pedestrian time
pedestrian_time["count"] += 1
pedestrian_time["time"] += float(trip["end time"]) - float(
trip["begin time"]
)
# for calculating pedestrian time by link
if trip["link_id"] not in pedestrianTripsByLinks:
pedestrianTripsByLinks[trip["link_id"]] = []
pedestrianTripsByLinks[trip["link_id"]].append(trip)
avg_pedestrian_time = (
0
if pedestrian_time["count"] == 0
else pedestrian_time["time"] / pedestrian_time["count"]
)
# Calculate pedestrian time by link
pedestrianTimeByLink = {}
for link_id, link in pedestrianTripsByLinks.items():
num_of_trips = 0
pedestrian_time = 0
for trip in link:
if trip["end time"] != 0:
num_of_trips += 1
pedestrian_time += float(trip["end time"]) - float(
trip["begin time"]
)
pedestrianTimeByLink[link_id] = (
0 if num_of_trips == 0 else pedestrian_time / num_of_trips
)
return avg_pedestrian_time, pedestrianTimeByLink
trips_per_person[event["person"]][-1]["end_time"] = float(event["time"])
trips_per_person[event["person"]][-1]["end_link_id"] = event["link"]
trips = []
for _, p_trips in trips_per_person.items():
for trip in p_trips:
t_len = float(trip["end_time"]) - float(trip["begin_time"])
# if t_len < 3600: continue # TODO: Ignore trips under one hour
trip_data = {
"trip_duration": t_len,
"start_link": trip["start_link_id"],
"end_link": trip["end_link_id"]
}
trips.append(trip_data)
trips_start_end = {}
for trip in trips:
if trip["start_link"] not in trips_start_end.keys():
trips_start_end[trip["start_link"]] = {}
if trip["end_link"] not in trips_start_end[trip["start_link"]].keys():
trips_start_end[trip["start_link"]][trip["end_link"]] = []
trips_start_end[trip["start_link"]][trip["end_link"]].append(trip["trip_duration"])
links = set()
for inner_dict in trips_start_end.values():
for inner_key in inner_dict.keys():
links.add(inner_key)
links.update(trips_start_end.keys())
links = list(links)
matrix = pd.DataFrame(index=links, columns=links)
matrix.fillna(0, inplace=True)
m_counts = pd.DataFrame(index=links, columns=links)
m_counts.fillna(1, inplace=True)
for s_link in trips_start_end.keys():
for e_link in trips_start_end[s_link].keys():
trip_duration_sum = sum(trips_start_end[s_link][e_link])
if trip_duration_sum < 3600: continue
trip_duration_count = len(trips_start_end[s_link][e_link])
if trip_duration_count < 5: continue
trip_duration_sum = sum(trips_start_end[s_link][e_link])
matrix.loc[s_link, e_link] = matrix.loc[s_link, e_link] + trip_duration_sum
m_counts.loc[s_link, e_link] = m_counts.loc[s_link, e_link] + trip_duration_count
upper_triangle = np.triu(matrix.values, k=1)
lower_triangle = np.tril(matrix.values)
sum_matrix = lower_triangle + upper_triangle.T
matrix_unidirect = pd.DataFrame(sum_matrix, index=matrix.index, columns=matrix.columns)
matrix_unidirect.fillna(0, inplace=True)
upper_triangle = np.triu(m_counts.values, k=1)
lower_triangle = np.tril(m_counts.values)
sum_matrix = lower_triangle + upper_triangle.T
m_counts_unidirect = pd.DataFrame(sum_matrix, index=m_counts.index, columns=m_counts.columns)
m_counts_unidirect.fillna(1, inplace=True)
result = matrix_unidirect.div(m_counts_unidirect)
result = pd.DataFrame(result).fillna(0)
final_result = []
for col_key in result.columns:
for index_key in result.index:
dat = {
"start_link": col_key,
"end_link": index_key,
"average_time": result.loc[col_key, index_key]
}
final_result.append(dat)
average = matrix.sum().sum() / m_counts.sum().sum()
if pd.isna(average):
average = 0
return {"average_trip_duration": average, "detailed_long_trips": final_result}
def users_per_mode(self, local_links=None, timeslot=None):
agents = {}
......@@ -655,7 +667,6 @@ class VehicleEvents(Events):
return -1
return length / (total_time / vehicle_count)
# fixed
def get_congested_links(self, rush_hour, vehicle_mode=None, local_links=None):
if local_links is None:
if vehicle_mode is None:
......@@ -693,7 +704,7 @@ class VehicleEvents(Events):
# calculate times on each link
link_travel_times = {}
# {link_id: [dT, dT, ...], }
for link_id in link_event_times:
for link_id in link_event_times.keys():
if "pt" in link_id: continue
for veh_id in link_event_times[link_id]:
if link_event_times[link_id][veh_id].get("entered", None) is None \
......@@ -710,13 +721,13 @@ class VehicleEvents(Events):
# calculate ideal link times using speed and length attributes
ideal_travel_times = {}
for link in self.network.links:
if "pt" in link_id: continue
if "pt" in link["id"]: continue
ideal_time = float(link["length"]) / float(link["freespeed"])
ideal_travel_times[link["id"]] = ideal_time
# compare average times with ideal and report congestion
congestion_length_sum = 0
congested_links = []
congested_links = {}
for link_id in link_travel_times:
if ideal_travel_times[link_id] < 2.0: # causes errors due to time being measured with second resolution
......@@ -725,8 +736,8 @@ class VehicleEvents(Events):
if avg_time > (ideal_travel_times[link_id] * 1.1): # allow some slowness
if link_id not in congested_links:
congestion_length_sum += self.network.get_link_length_link(link_id)
congested_links.append(link_id)
logger.debug("congested length %s", congestion_length_sum)
congested_links[link_id] = {"average time": avg_time}
# logger.debug("congested length %s", congestion_length_sum)
return {"congestion_length": congestion_length_sum, "congested_links": congested_links}
def public_transport_use_geojson(self, local_links=None):
......@@ -801,7 +812,7 @@ class VehicleEvents(Events):
continue
data_by_link[item["link"]] = {
"pt_users": item["occ"],
"pt_vehicles": [veh_id],
# "pt_vehicles": [veh_id],
}
else:
data_by_link[item["link"]]["pt_users"] += item["occ"]
......@@ -887,9 +898,19 @@ class VehicleEvents(Events):
return 0
def share_bicycles(self, local_links=None, timeslot=None):
logger.debug("share of bicycles")
try:
return self.bicycle_use(local_links, timeslot) / self.trips_number
except:
results = self.vehicles_count(local_links=local_links, timeslot=timeslot)
# logger.debug(results)
divisor = (
(results["car_count"] + results["bike_count"] + results["bus_count"])
if local_links is not None else
self.trips_number
)
result = self.bicycle_use(local_links, timeslot) / divisor
# logger.debug(result)
return result
except ZeroDivisionError as e:
app.logger.warn("division by zero in share_bicycles")
return 0
......@@ -969,57 +990,53 @@ class EmissionEvents(Events):
filtered_events = self.timeslot_filter_events(timeslot)
emissions = {
"CO": 0.0,
# "CO": 0.0,
"CO2_TOTAL": 0.0,
"HC": 0.0,
# "HC": 0.0,
"NOx": 0.0,
"PM": 0.0,
"CO2_rep": 0.0,
# "CO2_rep": 0.0,
}
for event in filter(
lambda event: event["type"].find("missionEvent") >= 0, filtered_events
):
emissions["CO"] += float(event["CO"])
# emissions["CO"] += float(event["CO"])
emissions["CO2_TOTAL"] += float(event["CO2_TOTAL"])
emissions["HC"] += float(event["HC"])
# emissions["HC"] += float(event["HC"])
emissions["NOx"] += float(event["NOx"])
emissions["PM"] += float(event["PM"])
emissions["CO2_rep"] += float(event["CO2_rep"])
# emissions["CO2_rep"] += float(event["CO2_rep"])
return emissions
def total_emissions_by_link(self, input_links=None, timeslot=None):
if input_links is None:
def filter_exp(_event):
return _event
else:
def filter_exp(_event):
return (
_event["type"] == "departure" or _event["type"] == "arrival"
) and _event["link"] in input_links
filtered_events = self.timeslot_filter_events(timeslot)
links = {}
for event in filter(filter_exp, filtered_events):
if not event["linkId"] in links:
links[event["linkId"]] = {
"CO": 0.0,
# "CO": 0.0,
"CO2_TOTAL": 0.0,
"HC": 0.0,
# "HC": 0.0,
"NOx": 0.0,
"PM": 0.0,
"CO2_rep": 0.0,
# "CO2_rep": 0.0,
}
links[event["linkId"]]["CO"] += float(event["CO"])
# links[event["linkId"]]["CO"] += float(event["CO"])
links[event["linkId"]]["CO2_TOTAL"] += float(event["CO2_TOTAL"])
links[event["linkId"]]["HC"] += float(event["HC"])
# links[event["linkId"]]["HC"] += float(event["HC"])
links[event["linkId"]]["NOx"] += float(event["NOx"])
links[event["linkId"]]["PM"] += float(event["PM"])
links[event["linkId"]]["CO2_rep"] += float(event["CO2_rep"])
# links[event["linkId"]]["CO2_rep"] += float(event["CO2_rep"])
return links
def emissions_total_links_sum(self, local_links=None, timeslot=None):
......@@ -1034,20 +1051,20 @@ class EmissionEvents(Events):
return _event["linkId"] in local_links
emissions_sum = {
"CO": 0.0,
# "CO": 0.0,
"CO2_TOTAL": 0.0,
"HC": 0.0,
# "HC": 0.0,
"NOx": 0.0,
"PM": 0.0,
"CO2_rep": 0.0,
# "CO2_rep": 0.0,
}
filtered_events = self.timeslot_filter_events(timeslot)
for event in filter(filter_exp, filtered_events):
emissions_sum["CO"] += float(event["CO"])
# emissions_sum["CO"] += float(event["CO"])
emissions_sum["CO2_TOTAL"] += float(event["CO2_TOTAL"])
emissions_sum["HC"] += float(event["HC"])
# emissions_sum["HC"] += float(event["HC"])
emissions_sum["NOx"] += float(event["NOx"])
emissions_sum["PM"] += float(event["PM"])
emissions_sum["CO2_rep"] += float(event["CO2_rep"])
# emissions_sum["CO2_rep"] += float(event["CO2_rep"])
return emissions_sum
......@@ -26,13 +26,11 @@ class Network:
:param path: path to the network.xml file
:param type: string
:param crs_epsg: the coordinate reference system used.
Network files used by MATSim use CRS epsg 2062.
:param type: string
"""
self.city = city
# used for importing counting locations, which use a different CRS
if city == "bilbao":
self.crs_epsg = "32630"
elif city == "amsterdam":
......@@ -84,7 +82,16 @@ class Network:
elif child.tag == "links":
for link in child:
links.append(link.attrib)
link_attributes = link.attrib
# logger.debug("link %s", link.attrib)
for x in link:
# logger.debug("attributes %s", x.attrib)
for y in x:
# logger.debug("attribute %s", y.attrib)
if y.attrib["name"] == "osm:way:highway":
# logger.debug("text %s", y.text)
link_attributes["osm:way:highway"] = y.text
links.append(link_attributes)
self.nodes = nodes
self.links = links
......@@ -585,6 +592,7 @@ class Network:
"x": float(node["x"]),
"y": float(node["y"])
}
logger.debug("point: %s", point)
candidates = self.search_kdtree_range(
point, kdtree=self.link_kd_tree, delta=delta
)
......
......@@ -2,6 +2,7 @@ import os
import json
import jsonschema
import logging
import time
from typing import Union, List
from flask import current_app as app
from app.netedit.network import Network
......@@ -110,10 +111,12 @@ def getEventsFromKeywords(inputPath, outputPath, keywords: list):
output.close()
def get_network_and_events(simulation_id, local: Union[bool, List] = False):
def get_network_and_events(simulation_id):
"""
Returns the network and the events objects.
"""
start = time.time()
date = get_sim_dir_names([simulation_id])[0]
network_path = f"{data_dir}/simulations/{simulation_id}/network.xml"
events_path = (
......@@ -126,9 +129,9 @@ def get_network_and_events(simulation_id, local: Union[bool, List] = False):
network = Network(city=city, path=network_path)
import time
start = time.time()
events = VehicleEvents(path=events_path, network=network)
app.logger.warn(
......
<?xml version="1.0" encoding="UTF-8"?>
<DEXi>
<VERSION>5.05</VERSION>
<CREATED>2022-12-06T14:27:02</CREATED>
<CREATED>2023-04-03T09:52:25</CREATED>
<OPTION>a</OPTION>
<OPTION>b</OPTION>
<SETTINGS/>
......@@ -22,10 +22,11 @@
</SCALEVALUE>
</SCALE>
<FUNCTION>
<LOW>100001100022100110002111022211221002221122221</LOW>
<LOW>110001100022111110002210022211111112211122211</LOW>
<CONSIST>False</CONSIST>
</FUNCTION>
<OPTION>1</OPTION>
<OPTION>0</OPTION>
<OPTION>1</OPTION>
<ATTRIBUTE>
<NAME>Local</NAME>
<SCALE>
......@@ -45,8 +46,8 @@
<FUNCTION>
<LOW>001012122</LOW>
</FUNCTION>
<OPTION>0</OPTION>
<OPTION>0</OPTION>
<OPTION>1</OPTION>
<OPTION>1</OPTION>
<ATTRIBUTE>
<NAME>Local Pollution</NAME>
<SCALE>
......@@ -65,13 +66,13 @@
</SCALE>
<FUNCTION>
<LOW>2221122111211101110011000</LOW>
<ENTERED>+-----------+-----------+</ENTERED>
<ENTERED>+-----------------------+</ENTERED>
<WEIGHTS>50;50</WEIGHTS>
<LOCWEIGHTS>50.00;50.00</LOCWEIGHTS>
<NORMLOCWEIGHTS>50.00;50.00</NORMLOCWEIGHTS>
</FUNCTION>
<OPTION>1</OPTION>
<OPTION>0</OPTION>
<OPTION>1</OPTION>
<ATTRIBUTE>
<NAME>Local Emissions</NAME>
<SCALE>
......@@ -97,14 +98,14 @@
</SCALEVALUE>
</SCALE>
<FUNCTION>
<LOW>00112011221122212223222330112211222122232223322334112221222322233223342334412223222332233423344334442223322334233443344434444</LOW>
<ENTERED>+------------------------------------------------------------------------------------------------+++-------------------------</ENTERED>
<LOW>00111011121112211222122230111211122112221222322233111221122212223222332233311222122232223322333233341222322233223332333433344</LOW>
<ENTERED>+---------------------------------------------------------------------------------------------------------------------------+</ENTERED>
<WEIGHTS>33.3333333333333;33.3333333333333;33.3333333333333</WEIGHTS>
<LOCWEIGHTS>33.33;33.33;33.33</LOCWEIGHTS>
<NORMLOCWEIGHTS>33.33;33.33;33.33</NORMLOCWEIGHTS>
</FUNCTION>
<OPTION>1</OPTION>
<OPTION>4</OPTION>
<OPTION>3</OPTION>
<ATTRIBUTE>
<NAME>Local NOx</NAME>
<SCALE>
......@@ -211,8 +212,8 @@
<GROUP>GOOD</GROUP>
</SCALEVALUE>
</SCALE>
<OPTION>3</OPTION>
<OPTION>4</OPTION>
<OPTION>2</OPTION>
<OPTION>2</OPTION>
</ATTRIBUTE>
</ATTRIBUTE>
<ATTRIBUTE>
......@@ -238,7 +239,7 @@
<LOCWEIGHTS>50.00;50.00</LOCWEIGHTS>
<NORMLOCWEIGHTS>50.00;50.00</NORMLOCWEIGHTS>
</FUNCTION>
<OPTION>0</OPTION>
<OPTION>1</OPTION>
<OPTION>1</OPTION>
<ATTRIBUTE>
<NAME>Local Pedestrian travel time</NAME>
......@@ -264,8 +265,8 @@
<GROUP>GOOD</GROUP>
</SCALEVALUE>
</SCALE>
<OPTION>3</OPTION>
<OPTION>3</OPTION>
<OPTION>2</OPTION>
<OPTION>2</OPTION>
</ATTRIBUTE>
<ATTRIBUTE>
<NAME>Local Daily internal bike travels</NAME>
......@@ -291,8 +292,8 @@
<GROUP>GOOD</GROUP>
</SCALEVALUE>
</SCALE>
<OPTION>3</OPTION>
<OPTION>1</OPTION>
<OPTION>2</OPTION>
<OPTION>2</OPTION>
</ATTRIBUTE>
</ATTRIBUTE>
</ATTRIBUTE>
......@@ -313,10 +314,10 @@
</SCALEVALUE>
</SCALE>
<FUNCTION>
<LOW>011012122</LOW>
<LOW>001012122</LOW>
</FUNCTION>
<OPTION>1</OPTION>
<OPTION>2</OPTION>
<OPTION>1</OPTION>
<ATTRIBUTE>
<NAME>City-wide Pollution</NAME>
<SCALE>
......@@ -340,8 +341,8 @@
<LOCWEIGHTS>50.00;50.00</LOCWEIGHTS>
<NORMLOCWEIGHTS>50.00;50.00</NORMLOCWEIGHTS>
</FUNCTION>
<OPTION>0</OPTION>
<OPTION>2</OPTION>
<OPTION>1</OPTION>
<OPTION>1</OPTION>
<ATTRIBUTE>
<NAME>City-wide Acoustic pollution</NAME>
<SCALE>
......@@ -366,8 +367,8 @@
<GROUP>GOOD</GROUP>
</SCALEVALUE>
</SCALE>
<OPTION>4</OPTION>
<OPTION>0</OPTION>
<OPTION>2</OPTION>
<OPTION>2</OPTION>
</ATTRIBUTE>
<ATTRIBUTE>
<NAME>City-wide Emissions</NAME>
......@@ -394,14 +395,14 @@
</SCALEVALUE>
</SCALE>
<FUNCTION>
<LOW>00111011121112211223122330111211122112231223322333111221122312233223332333411223122332233323334333441223322333233343334433444</LOW>
<ENTERED>+-------------------------------------------------------------------------------------------------------------------------+++</ENTERED>
<LOW>00111011121112211222122230111211122112221222322233111221122212223222332233311222122232223322333233341222322233223332333433344</LOW>
<ENTERED>+---------------------------------------------------------------------------------------------------------------------------+</ENTERED>
<WEIGHTS>33.3333333333333;33.3333333333333;33.3333333333333</WEIGHTS>
<LOCWEIGHTS>33.33;33.33;33.33</LOCWEIGHTS>
<NORMLOCWEIGHTS>33.33;33.33;33.33</NORMLOCWEIGHTS>
</FUNCTION>
<OPTION>2</OPTION>
<OPTION>1</OPTION>
<OPTION>2</OPTION>
<ATTRIBUTE>
<NAME>City-wide NOx</NAME>
<SCALE>
......@@ -427,7 +428,7 @@
</SCALEVALUE>
</SCALE>
<OPTION>2</OPTION>
<OPTION>0</OPTION>
<OPTION>2</OPTION>
</ATTRIBUTE>
<ATTRIBUTE>
<NAME>City-wide PM10</NAME>
......@@ -453,7 +454,7 @@
<GROUP>GOOD</GROUP>
</SCALEVALUE>
</SCALE>
<OPTION>3</OPTION>
<OPTION>2</OPTION>
<OPTION>2</OPTION>
</ATTRIBUTE>
<ATTRIBUTE>
......@@ -480,8 +481,8 @@
<GROUP>GOOD</GROUP>
</SCALEVALUE>
</SCALE>
<OPTION>1</OPTION>
<OPTION>0</OPTION>
<OPTION>2</OPTION>
<OPTION>2</OPTION>
</ATTRIBUTE>
</ATTRIBUTE>
</ATTRIBUTE>
......@@ -508,7 +509,7 @@
<LOCWEIGHTS>50.00;50.00</LOCWEIGHTS>
<NORMLOCWEIGHTS>50.00;50.00</NORMLOCWEIGHTS>
</FUNCTION>
<OPTION>2</OPTION>
<OPTION>1</OPTION>
<OPTION>1</OPTION>
<ATTRIBUTE>
<NAME>City-wide Pedestrian travel time</NAME>
......@@ -534,18 +535,18 @@
<GROUP>GOOD</GROUP>
</SCALEVALUE>
</SCALE>
<OPTION>1</OPTION>
<OPTION>3</OPTION>
<OPTION>2</OPTION>
<OPTION>2</OPTION>
</ATTRIBUTE>
<ATTRIBUTE>
<NAME>City-wide Daily internal bike travels</NAME>
<SCALE>
<SCALEVALUE>
<NAME>+15%</NAME>
<NAME>-15%</NAME>
<GROUP>BAD</GROUP>
</SCALEVALUE>
<SCALEVALUE>
<NAME>+5%</NAME>
<NAME>- 5%</NAME>
<GROUP>BAD</GROUP>
</SCALEVALUE>
<SCALEVALUE>
......@@ -553,16 +554,16 @@
<DESCRIPTION>No change</DESCRIPTION>
</SCALEVALUE>
<SCALEVALUE>
<NAME>- 5%</NAME>
<NAME>+5%</NAME>
<GROUP>GOOD</GROUP>
</SCALEVALUE>
<SCALEVALUE>
<NAME>-15%</NAME>
<NAME>+15%</NAME>
<GROUP>GOOD</GROUP>
</SCALEVALUE>
</SCALE>
<OPTION>1</OPTION>
<OPTION>1</OPTION>
<OPTION>2</OPTION>
<OPTION>2</OPTION>
</ATTRIBUTE>
</ATTRIBUTE>
</ATTRIBUTE>
......@@ -590,8 +591,8 @@
<GROUP>GOOD</GROUP>
</SCALEVALUE>
</SCALE>
<OPTION>1</OPTION>
<OPTION>3</OPTION>
<OPTION>2</OPTION>
<OPTION>2</OPTION>
</ATTRIBUTE>
</ATTRIBUTE>
</DEXi>