diff --git a/src/app - copia.py b/src/app - copia.py
new file mode 100644
index 0000000000000000000000000000000000000000..e603f069b06ea5a8b769d9b10fdbfc4772636939
--- /dev/null
+++ b/src/app - copia.py	
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on: 16/01/2023
+@author: Andoni Aranguren Ubierna
+"""
+import sys
+print(sys.path)
+from src import constants
+
+import mysql.connector
+from flask import Flask, request, render_template
+
+from src import recommender, database, planner
+
app = Flask(__name__)
# Global DB connection, populated in the __main__ block and shared by every
# request handler. NOTE(review): a single shared connection is not
# thread-safe under Flask's default threaded server — confirm pooling needs.
cnx = None
+
+
# Fixed: flask.Flask has no attribute `plan_detail`, so the original
# `@app.plan_detail("/")` raised AttributeError at import time. The sibling
# src/app.py registers this handler with @app.route("/").
@app.route("/")
def index():
    """Serve the landing page (templates/index.html)."""
    return render_template("index.html")
+
+
+# RECOMMENDERS: -------------------------------------------------------------------------------------------------------
@app.get("/recommender/popularity")
def recommender_popularity():
    """
    Recommend actions ranked by popularity.
    Delegates to recommender.recommender_popularity with the shared DB
    connection and the incoming Flask request.
    :return: json
    """
    return recommender.recommender_popularity(cnx, request)
+
+
@app.get("/recommender/geographic/action_id")
def recommender_geographic_id():
    """
    Recommend actions geographically close to a given action id
    (docstring corrected: this endpoint is geographic, not popularity).
    :return: json
    """
    return recommender.recommender_geographic_id(cnx, request)
+
+
@app.get("/recommender/geographic/lat_lon")
def recommender_geographic_lat_lon():
    """
    Recommend actions geographically close to a given lat/lon pair
    (docstring corrected: this endpoint is geographic, not popularity).
    :return: json
    """
    return recommender.recommender_geographic_lat_lon(cnx, request)
+
+
@app.get("/recommender/preferences")
def recommender_preferences():
    """
    Recommend actions based on the user's preference weights, optionally
    filtered by the kpi provided.
    :return: json
    """
    return recommender.recommender_preferences(cnx, request)
+
+
@app.get("/recommender/kpis/kpi_id")
def recommender_kpis_kpi_id():
    """
    Given a kpi id, return kpis ranked by affinity to it
    (docstring corrected: the handler takes a kpi id, not an action).
    :return: json
    """
    return recommender.recommender_kpis_kpi_id(cnx, request)
+
+
@app.get("/recommender/kpis/action_id")
def recommender_kpis_action_id():
    """
    Given an action id, return kpis ranked by affinity to that action.
    :return: json
    """
    return recommender.recommender_kpis_action_id(cnx, request)
+
+
@app.get("/recommender/bayesian")
def recommender_bayesian_action_id():
    """
    Recommend actions via the bayesian recommender, given a list of
    already-chosen action ids (docstring corrected from the popularity
    copy-paste).
    :return: json
    """
    return recommender.recommender_bayesian_action_id(cnx, request)
+
+
+# DATABASE SAVE: -------------------------------------------------------------------------------------------------------
@app.post("/database/save/plan")
def database_save_plan():
    """
    Store the plan_details that have been selected.
    The JSON body must follow the format produced by /planner/plan_detail,
    with each plan_detail filled in.
    :return: json status pair (constants.ALL_RIGHT) or an error pair
    """
    # Reject non-JSON bodies early; database.database_save_plan assumes
    # request.json is present.
    if request.json:
        return database.database_save_plan(cnx, request)
    else:
        return constants.ERROR_JSON_NEEDED
+
+
+# plan_detail PLANNER: -------------------------------------------------------------------------------------------------------
@app.get("/planner/plan_detail")
def planner_plan_detail():
    """
    Compute the best plan_detail traversing streets, reordering the
    intermediate nodes provided in the query string.
    :return: json with the plan_detail
    """
    return planner.planner_plan_detail(cnx, request)
+
+
+# MAIN: ----------------------------------------------------------------------------------------------------------------
if __name__ == "__main__":
    # Open the shared DB connection used by all handlers above.
    cnx = mysql.connector.connect(**constants.DDBB_CONFIG)
    # NOTE(review): debug=True together with host 0.0.0.0 exposes the
    # Werkzeug debugger to the network — disable debug outside local dev.
    app.run(debug=True, host='0.0.0.0')
diff --git a/src/app.py b/src/app.py
new file mode 100644
index 0000000000000000000000000000000000000000..ce827045a18171e05a3453461a1881de8e2a4e1f
--- /dev/null
+++ b/src/app.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on: 16/01/2023
+@author: Andoni Aranguren Ubierna
+"""
+import sys
+print(sys.path)
+from src import constants
+
+import mysql.connector
+from flask import Flask, request, render_template
+
+from src import recommender, database, planner
+
app = Flask(__name__)
# Global DB connection, populated in the __main__ block and shared by every
# request handler. NOTE(review): a single shared connection is not
# thread-safe under Flask's default threaded server — confirm pooling needs.
cnx = None
+
+
@app.route("/")
def index():
    """Serve the landing page (templates/index.html)."""
    return render_template("index.html")
+
+
+# RECOMMENDERS: -------------------------------------------------------------------------------------------------------
@app.get("/recommender/popularity")
def recommender_popularity():
    """
    Recommend actions ranked by popularity.
    Delegates to recommender.recommender_popularity with the shared DB
    connection and the incoming Flask request.
    :return: json
    """
    return recommender.recommender_popularity(cnx, request)
+
+
@app.get("/recommender/geographic/action_id")
def recommender_geographic_id():
    """
    Recommend actions geographically close to a given action id
    (docstring corrected: this endpoint is geographic, not popularity).
    :return: json
    """
    return recommender.recommender_geographic_id(cnx, request)
+
+
@app.get("/recommender/geographic/lat_lon")
def recommender_geographic_lat_lon():
    """
    Recommend actions geographically close to a given lat/lon pair
    (docstring corrected: this endpoint is geographic, not popularity).
    :return: json
    """
    return recommender.recommender_geographic_lat_lon(cnx, request)
+
+
@app.get("/recommender/preferences")
def recommender_preferences():
    """
    Recommend actions based on the user's preference weights, optionally
    filtered by the kpi provided.
    :return: json
    """
    return recommender.recommender_preferences(cnx, request)
+
+
@app.get("/recommender/kpis/kpi_id")
def recommender_kpis_kpi_id():
    """
    Given a kpi id, return kpis ranked by affinity to it
    (docstring corrected: the handler takes a kpi id, not an action).
    :return: json
    """
    return recommender.recommender_kpis_kpi_id(cnx, request)
+
+
@app.get("/recommender/kpis/action_id")
def recommender_kpis_action_id():
    """
    Given an action id, return kpis ranked by affinity to that action.
    :return: json
    """
    return recommender.recommender_kpis_action_id(cnx, request)
+
+
@app.get("/recommender/bayesian")
def recommender_bayesian_action_id():
    """
    Recommend actions via the bayesian recommender, given a list of
    already-chosen action ids (docstring corrected from the popularity
    copy-paste).
    :return: json
    """
    return recommender.recommender_bayesian_action_id(cnx, request)
+
+
+# DATABASE SAVE: -------------------------------------------------------------------------------------------------------
@app.post("/database/save/plan")
def database_save_plan():
    """
    Store the plan_details that have been selected.
    The JSON body must follow the format produced by /planner/plan_detail,
    with each plan_detail filled in.
    :return: json status pair (constants.ALL_RIGHT) or an error pair
    """
    # Reject non-JSON bodies early; database.database_save_plan assumes
    # request.json is present.
    if request.json:
        return database.database_save_plan(cnx, request)
    else:
        return constants.ERROR_JSON_NEEDED
+
+
+# plan_detail PLANNER: -------------------------------------------------------------------------------------------------------
@app.get("/planner/plan_detail")
def planner_plan_detail():
    """
    Compute the best plan_detail traversing streets, reordering the
    intermediate nodes provided in the query string.
    :return: json with the plan_detail
    """
    return planner.planner_plan_detail(cnx, request)
+
+
+# MAIN: ----------------------------------------------------------------------------------------------------------------
if __name__ == "__main__":
    # Open the shared DB connection used by all handlers above.
    cnx = mysql.connector.connect(**constants.DDBB_CONFIG)
    # NOTE(review): debug=True together with host 0.0.0.0 exposes the
    # Werkzeug debugger to the network — disable debug outside local dev.
    app.run(debug=True, host='0.0.0.0')
diff --git a/src/constants.py b/src/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..34f30f9d44644f2979980a3cad897e80dca83e77
--- /dev/null
+++ b/src/constants.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on: 16/01/2023
+@author: Andoni Aranguren Ubierna
+"""
# NOTE(review): hard-coded root/admin credentials committed to source —
# move them to environment variables or an untracked config file.
DDBB_CONFIG = {'user': 'root', 'password': 'admin', 'host': 'localhost', 'database': 'urbanite_recommender', 'autocommit': True}
# Base URL of the OpenTripPlanner (OTP) service queried by utils.call_otp.
OTP_CONFIG = {'host': 'https://afrp.santander.urbanage.digital.tecnalia.dev/'}

# [json_body, http_status] pairs returned directly by the Flask handlers.
ERROR_EXCEPTION = [{"error": "Upss... Something went wrong"}, 415]
ERROR_JSON_NEEDED = [{"error": "Request must be JSON"}, 415]
ERROR_plan_DATE_MISSING = [{"error": " ".join(["No date was found in the plan.",
                                                      "That's weird, did you do it by hand? Use /planner/plan_detail"])}, 415]
ERROR_INVALID_action_ID = [{"error": "At least one of the provided action id are not found in the database"}, 415]
ALL_RIGHT = [{"All right!": "The request has been completed correctly"}, 200]

# Preference categories and the default day partition (start of each slot).
DEFAULT_PREFERENCES = ["sports", "culture", "sightseeing", "gastronomy"]
DEFAULT_TIME_SLOTS = ["08:30", "12:30", "15:30", "19:30", "23:30"]
# Positional argument order expected by the insert_plan_detail stored proc.
DEFAULT_INSERT_plan_detail_ORDER = ["plan_id", "time_slot", "fecha_hora", "arrive_by", "wheelchair", "max_walk_distance",
                              "from_lat", "from_lon", "to_lat", "to_lon",
                              "sports", "culture", "sightseeing", "gastronomy",
                              "action_id_array"]
# Input-validation patterns for the query-string parameters.
REGEXP_COORDS = r"^\-?[0-9]+\.[0-9]+,\-?[0-9]+\.[0-9]+$"
REGEXP_DATE = r"^2[0-9][0-9][0-9]-[0-1][0-9]-[0-3][0-9] [0-2][0-9]:[0-5][0-9]:[0-5][0-9]$"
REGEXP_INTERMEDIATE_NODES = r"^[0-9]+(?:,[0-9]+)*$"
REGEXP_TIME = r"^[0-2][0-9]:[0-5][0-9]$"

# Random-shift parameters used by planner.py to relocate an end point that
# OTP reports as trivially close to the origin.
DEFAULT_SHIFTING = 0.0001
DEFAULT_SHIFTING_AMOUNT = 1.01
DEFAULT_MAX_TRIES_TO_RELOCATE_ENDING_actionNT = 100
# Exact OTP error message that triggers the relocation retry loop.
DEFAULT_OTP_TOO_CLOSE = 'ES-los Origin is within a trivial distance of the destination.'

# NOTE(review): mutable module-level state — callers must deep-copy this
# template; aliasing it and mutating leaks data between requests.
plan_JSON_TEMPLATE = dict(date=None, plan_details={0: {},  # Morning plan_detail
                                                  1: {},  # Lunch brake or pintxos plan_detail
                                                  2: {},  # Afternoon plan_detail
                                                  3: {}})  # Dinner brake or pintxos plan_detail

# Load data
PATH_plan_detailS = "data/Rutas"
PATH_actionS = "data/actions"

# Load data - SQL queries.
# NOTE(review): the queries below that interpolate with str.format()
# ({names}, {kpi}, {date}) are not parameterized — safe only for trusted,
# locally generated values, never for request input.
sql_insert_action = """
INSERT IGNORE INTO action(action_id, lat, lon, action_name) 
VALUES (%s, %s, %s, %s)
"""

sql_insert_kpi = """INSERT IGNORE INTO kpi(kpi_name) VALUES(%s)"""

sql_get_kpi_ids = """SELECT kpi_name, kpi_id FROM kpi WHERE kpi_name IN ('{names}')"""

sql_insert_action_kpi = """
INSERT IGNORE INTO kpi_action(action_id, kpi_id)
VALUES ( (SELECT action_id FROM action WHERE lat = %s AND lon = %s AND action_name = %s),
         {kpi})
"""

sql_insert_plan = """
INSERT IGNORE INTO plan(plan_date)
VALUES(%s);
"""

sql_insert_plan_detail = """
INSERT IGNORE INTO plan_detail(plan_id, time_slot, 
                        from_lat, from_lon, to_lat, to_lon,
                        fecha_hora, sports, culture, sightseeing, gastronomy)
VALUES((SELECT plan_id FROM plan WHERE plan_date = '{date}'), %s, 
        43.2668, -2.9353, 43.2600, -2.9393,
        %s, %s, %s, %s, %s)
"""

sql_get_chosen_action_ids = """SELECT action_name, action_id FROM action WHERE action_name LIKE %s"""

sql_insert_chosen_action = """
INSERT IGNORE INTO CHOSEN_action(plan_id, time_slot, action_id)
VALUES((SELECT plan_id FROM plan WHERE plan_date = '{date}'), %s, %s)
"""
+
diff --git a/src/database.py b/src/database.py
new file mode 100644
index 0000000000000000000000000000000000000000..61c20eeb48475ce136fa1b56d19172e9e73b704f
--- /dev/null
+++ b/src/database.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on: 16/01/2023
+@author: Andoni Aranguren Ubierna
+"""
+from src import utils, constants
+
+
def __database_save_plan_detail__(cnx, otp_parameters, DDBB_params, intermediate_nodes):
    """
    Persist a single plan_detail through the insert_plan_detail stored proc.

    :param cnx: open MySQL connection
    :param otp_parameters: query parameters forwarded to OTP
    :param DDBB_params: column values, keyed per DEFAULT_INSERT_plan_detail_ORDER
    :param intermediate_nodes: nodes to reorder; nothing is stored when empty
    :return: the error object from OTP, or None on success / no-op
    """
    if not intermediate_nodes:
        return None
    response, errors = utils.get_best_order_plan_detail(otp_parameters, intermediate_nodes)
    if errors:
        return errors
    # Fixed: the original dereferenced response["requestParameters"] BEFORE
    # checking `response is not None`, raising TypeError when OTP returned
    # neither a response nor an error.
    if response is not None:
        DDBB_params["action_id_array"] = response["requestParameters"]["bestOrder"]
        query_params = [DDBB_params[x] for x in constants.DEFAULT_INSERT_plan_detail_ORDER]
        s_parameters_str = ', '.join(['%s'] * len(query_params))
        utils.execute_query(cnx, f"CALL insert_plan_detail({s_parameters_str});", query_params, no_return=True)
    return None
+
+
def database_save_plan(cnx, request):
    """
    Store the plan_details that have been selected.

    Expects request.json shaped like the /planner/plan_detail output:
    {"date": "...", "plan_details": {time_slot: {...} or {}}}.
    :return: [json, status] pair — ALL_RIGHT or a specific error constant
    """
    plan = request.json
    date = plan["date"]
    if date is None:
        return constants.ERROR_plan_DATE_MISSING

    # Check that every referenced action id exists in the database.
    action_id = []
    for plan_details in [x for x in plan["plan_details"].values() if x]:
        action_id += plan_details["requestParameters"]["bestOrder"].split(",")

    action_id = list({int(d): d for d in action_id if d}.values())  # Remove duplicates

    if not utils.check_valid_action_ids(cnx, action_id):
        return constants.ERROR_INVALID_action_ID

    plan_id, error = utils.generate_plan(cnx, date)
    if error:
        return constants.ERROR_plan_DATE_MISSING
    # Re-query OTP per slot to recover the best visiting order before saving.
    for time_slot in plan["plan_details"].keys():
        plan_detail = plan["plan_details"][time_slot]
        if plan_detail:
            plan_detail["requestParameters"]["intermediateNodes"] = None
            intermediate_nodes = None
            if "bestOrder" in plan_detail["requestParameters"]:
                intermediate_nodes_ids_str = plan_detail["requestParameters"]["bestOrder"]
                # Resolve ids -> node records plus the OTP-formatted list.
                intermediate_nodes, intermediate_nodes_otp = utils.get_intermediate_nodes(cnx,
                                                                                          intermediate_nodes_ids_str)
                plan_detail["requestParameters"]["intermediateNodes"] = intermediate_nodes_otp
            plan_detail["DDBBParameters"]["plan_id"] = plan_id
            __database_save_plan_detail__(cnx,
                                    plan_detail["requestParameters"],
                                    plan_detail["DDBBParameters"],
                                    intermediate_nodes)
    return constants.ALL_RIGHT
diff --git a/src/load_data - copia.py b/src/load_data - copia.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ec5476b9ef49e031146cd7c18452f045c2b0f33
--- /dev/null
+++ b/src/load_data - copia.py	
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on: 16/01/2023
+@author: Andoni Aranguren Ubierna
+"""
+import os
+
+import sys
+import numpy as np
+import pandas as pd
+import requests
+from mysql import connector
+from utm.conversion import to_latlon as utm_to_latlon
+
+import sys
+print(sys.path)
+from src import constants
+
+
def to_latlon(df, x_str, y_str):
    """
    Add "lon"/"lat" columns converted from UTM (zone 30T) columns x_str/y_str.

    :param df: pandas DataFrame with UTM easting/northing columns
    :return: the same DataFrame, mutated in place
    """
    x, y = [], []

    for index, row in df.iterrows():
        # utm.to_latlon returns (latitude, longitude).
        coords_portal = utm_to_latlon(row[x_str], row[y_str], zone_number=30, zone_letter="T")
        x += [coords_portal[0]]
        y += [coords_portal[1]]
    # NOTE(review): "lon" receives latitudes and "lat" longitudes here, but
    # the callers below read the two columns in the same swapped order, so
    # the values cancel out end-to-end — confirm before "fixing" either side.
    df["lon"] = np.round(x, 8)
    df["lat"] = np.round(y, 8)

    return df
+
+
def generate_POIs(cnx):
    """
    Load the Bilbao POI CSV into the poi/tag tables.

    NOTE(review): this "copia" file references constants.PATH_POIS,
    sql_insert_poi, sql_insert_tag, sql_get_tag_ids and sql_insert_poi_tag,
    none of which exist in the src/constants.py shown in this tree (they
    appear renamed to action/kpi equivalents) — running this raises
    AttributeError; confirm this file is a stale backup.
    """
    cursor = cnx.cursor(buffered=True)
    path = constants.PATH_POIS

    file = path + "/POIS_LugaresTuristicos_Bilbao.csv"
    if os.path.isfile(file):
        df = pd.read_csv(file, encoding="utf-8", sep=";", decimal=".", encoding_errors='replace')
        df = to_latlon(df, "COORDENADA_UTM_X", "COORDENADA_UTM_Y")
        df.drop_duplicates(subset=["lon", "lat", "NOMBRE_LUGAR_CAS"], inplace=True)
        # Distinct tag names across all comma-separated TAGS cells.
        valores_preferencias = set([i for x in df["TAGS"].unique().tolist() for i in x.split(",")])

        df_sql = df[["ID", "lon", "lat", "NOMBRE_LUGAR_CAS"]]
        cursor.executemany(constants.sql_insert_poi, df_sql.values.tolist())
        cnx.commit()
        for tag in valores_preferencias:
            cursor.execute(constants.sql_insert_tag, [tag])
        cnx.commit()

        # Map tag name -> tag id as assigned by the database.
        names_dict = {}
        for tag in valores_preferencias:
            names_dict[tag] = ""

        cursor.execute(constants.sql_get_tag_ids.format(names="','".join(names_dict.keys())), )
        cnx.commit()
        ret = cursor.fetchall()

        for row in ret:
            names_dict[row[0]] = row[1]

        # Link every POI to each of its tags.
        for index, row in df.iterrows():
            for tag in row["TAGS"].split(","):
                cursor.execute(constants.sql_insert_poi_tag.format(tag=names_dict[tag]),
                               row[["lon", "lat", "NOMBRE_LUGAR_CAS"]].values.tolist())
        cnx.commit()
+
+
def generate_routes(cnx):
    """
    Replay the route CSVs under constants.PATH_ROUTES against the local
    planner service and persist each resulting itinerary.

    :param cnx: open MySQL connection used to resolve POI names to ids
    """
    cursor = cnx.cursor(buffered=True)
    # Replace accented characters with the SQL LIKE wildcard so lookups
    # match rows whose accents were mangled during import.
    a, b = 'áéíóúüÁÉÍÓÚÜ', '%%%%%%%%%%%%'
    trans = str.maketrans(a, b)
    path = constants.PATH_ROUTES
    for filename in os.listdir(path):
        f = os.path.join(path, filename)
        if os.path.isfile(f):
            df = pd.read_csv(f, sep="\t")
            json_itinerary = None
            for index, row in df.iterrows():
                # Removed: unused local `names` from the original.
                dict_id = {}
                for name in row["poi_names"].split(";"):
                    cursor.execute(constants.sql_get_chosen_poi_ids, [name.translate(trans)])
                    cnx.commit()
                    ret = cursor.fetchall()
                    # Narrowed from a bare `except:` — only "no row matched"
                    # (empty fetchall) is expected here.
                    try:
                        dict_id[ret[0][0]] = str(ret[0][1])
                    except IndexError:
                        pass

                poi_id = ",".join(list(dict_id.values()))

                url_route = 'http://localhost:5000/planner/route?'
                parameters = "&".join(["fromPlace=" + row["from_place"],
                                       "toPlace=" + row["to_place"],
                                       "timeSlot=" + str(int(row["time_slot"])),
                                       "sports=" + str(row["sports"]),
                                       "culture=" + str(row["culture"]),
                                       "sightseeing=" + str(row["sightseeing"]),
                                       "gastronomy=" + str(row["gastronomy"])])
                if poi_id != '':
                    parameters += "&intermediateNodesIds=" + poi_id

                # First row seeds the itinerary; later rows fill their slot.
                if json_itinerary is None:
                    json_itinerary = requests.get(url_route + parameters).json()
                else:
                    time_slot = str(int(row["time_slot"]))
                    json_itinerary["routes"][time_slot] = requests.get(url_route + parameters).json()["routes"][time_slot]
            url_save_it = 'http://localhost:5000/database/save/itinerary'
            requests.post(url_save_it, json=json_itinerary)
+
+
def main(ddbb_config: dict):
    """Connect with *ddbb_config*, load POIs then routes, and disconnect."""
    connection = connector.connect(**ddbb_config)
    generate_POIs(connection)
    generate_routes(connection)
    connection.close()
+
+
if __name__ == '__main__':
    # Run the full load using the repo's default DB configuration.
    ddbb_config = constants.DDBB_CONFIG
    main(ddbb_config)
diff --git a/src/load_data.py b/src/load_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..e9ed3d54fb98aad745ecc0743015e4694a93abb8
--- /dev/null
+++ b/src/load_data.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on: 16/01/2023
+@author: Andoni Aranguren Ubierna
+"""
+import os
+
+import sys
+import numpy as np
+import pandas as pd
+import requests
+from mysql import connector
+from utm.conversion import to_latlon as utm_to_latlon
+
+import sys
+print(sys.path)
+from src import constants
+
+
def to_latlon(df, x_str, y_str):
    """
    Add "lon"/"lat" columns converted from UTM (zone 30T) columns x_str/y_str.

    :param df: pandas DataFrame with UTM easting/northing columns
    :return: the same DataFrame, mutated in place
    """
    x, y = [], []

    for index, row in df.iterrows():
        # utm.to_latlon returns (latitude, longitude).
        coords_portal = utm_to_latlon(row[x_str], row[y_str], zone_number=30, zone_letter="T")
        x += [coords_portal[0]]
        y += [coords_portal[1]]
    # NOTE(review): "lon" receives latitudes and "lat" longitudes here, but
    # the callers below read the two columns in the same swapped order, so
    # the values cancel out end-to-end — confirm before "fixing" either side.
    df["lon"] = np.round(x, 8)
    df["lat"] = np.round(y, 8)

    return df
+
+
def generate_actions(cnx):
    """
    Load the Bilbao actions CSV into the action/kpi tables.

    Reads <PATH_actionS>/actionS_LugaresTuristicos_Bilbao.csv, converts UTM
    coordinates, inserts actions and their kpis, and links them.
    :param cnx: open MySQL connection (autocommit per DDBB_CONFIG)
    """
    cursor = cnx.cursor(buffered=True)
    path = constants.PATH_actionS

    file = path + "/actionS_LugaresTuristicos_Bilbao.csv"
    if os.path.isfile(file):
        df = pd.read_csv(file, encoding="utf-8", sep=";", decimal=".", encoding_errors='replace')
        df = to_latlon(df, "COORDENADA_UTM_X", "COORDENADA_UTM_Y")
        df.drop_duplicates(subset=["lon", "lat", "NOMBRE_LUGAR_CAS"], inplace=True)
        # Distinct kpi names across all comma-separated "kpiS" cells.
        valores_preferencias = set([i for x in df["kpiS"].unique().tolist() for i in x.split(",")])

        df_sql = df[["ID", "lon", "lat", "NOMBRE_LUGAR_CAS"]]
        cursor.executemany(constants.sql_insert_action, df_sql.values.tolist())
        cnx.commit()
        for kpi in valores_preferencias:
            cursor.execute(constants.sql_insert_kpi, [kpi])
        cnx.commit()

        # Map kpi name -> kpi id as assigned by the database.
        names_dict = {}
        for kpi in valores_preferencias:
            names_dict[kpi] = ""

        # NOTE(review): sql_get_kpi_ids is built with str.format, not
        # parameters — acceptable only because the names come from a local
        # trusted CSV.
        cursor.execute(constants.sql_get_kpi_ids.format(names="','".join(names_dict.keys())), )
        cnx.commit()
        ret = cursor.fetchall()

        for row in ret:
            names_dict[row[0]] = row[1]

        # Link every action to each of its kpis.
        for index, row in df.iterrows():
            for kpi in row["kpiS"].split(","):
                cursor.execute(constants.sql_insert_action_kpi.format(kpi=names_dict[kpi]),
                               row[["lon", "lat", "NOMBRE_LUGAR_CAS"]].values.tolist())
        cnx.commit()
+
+
def generate_plan_details(cnx):
    """
    Replay the plan CSVs under constants.PATH_plan_detailS against the local
    planner service and persist each resulting plan.

    :param cnx: open MySQL connection used to resolve action names to ids
    """
    cursor = cnx.cursor(buffered=True)
    # Replace accented characters with the SQL LIKE wildcard so lookups
    # match rows whose accents were mangled during import.
    a, b = 'áéíóúüÁÉÍÓÚÜ', '%%%%%%%%%%%%'
    trans = str.maketrans(a, b)
    path = constants.PATH_plan_detailS
    for filename in os.listdir(path):
        f = os.path.join(path, filename)
        if os.path.isfile(f):
            df = pd.read_csv(f, sep="\t")
            json_plan = None
            for index, row in df.iterrows():
                # Removed: unused local `names` from the original.
                dict_id = {}
                for name in row["action_names"].split(";"):
                    cursor.execute(constants.sql_get_chosen_action_ids, [name.translate(trans)])
                    cnx.commit()
                    ret = cursor.fetchall()
                    # Narrowed from a bare `except:` — only "no row matched"
                    # (empty fetchall) is expected here.
                    try:
                        dict_id[ret[0][0]] = str(ret[0][1])
                    except IndexError:
                        pass

                action_id = ",".join(list(dict_id.values()))

                url_plan_detail = 'http://localhost:5000/planner/plan_detail?'
                parameters = "&".join(["fromPlace=" + row["from_place"],
                                       "toPlace=" + row["to_place"],
                                       "timeSlot=" + str(int(row["time_slot"])),
                                       "sports=" + str(row["sports"]),
                                       "culture=" + str(row["culture"]),
                                       "sightseeing=" + str(row["sightseeing"]),
                                       "gastronomy=" + str(row["gastronomy"])])
                if action_id != '':
                    parameters += "&intermediateNodesIds=" + action_id

                # First row seeds the plan; later rows fill their time slot.
                if json_plan is None:
                    json_plan = requests.get(url_plan_detail + parameters).json()
                else:
                    time_slot = str(int(row["time_slot"]))
                    json_plan["plan_details"][time_slot] = requests.get(url_plan_detail + parameters).json()["plan_details"][time_slot]
            url_save_it = 'http://localhost:5000/database/save/plan'
            requests.post(url_save_it, json=json_plan)
+
+
def main(ddbb_config: dict):
    """Connect with *ddbb_config*, load actions, and disconnect."""
    cnx = connector.connect(**ddbb_config)
    generate_actions(cnx)
    # NOTE(review): unlike the "copia" version, generate_plan_details is
    # defined but never called here — confirm whether that is intentional.
    cnx.close()
+
+
if __name__ == '__main__':
    # Run the load using the repo's default DB configuration.
    ddbb_config = constants.DDBB_CONFIG
    main(ddbb_config)
diff --git a/src/planner.py b/src/planner.py
new file mode 100644
index 0000000000000000000000000000000000000000..75687e11db312b3b0f7c20c46df975e09f638864
--- /dev/null
+++ b/src/planner.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on: 16/01/2023
+@author: Andoni Aranguren Ubierna
+"""
+import time
+import datetime
+
+from src import utils, constants
+
+import random as rnd
+
+
def plan_similar_plan_detail_shifting_end_actionnt(otp_parameters, intermediate_nodes, DDBB_params):
    """
    Retry planning when OTP rejects origin/destination as trivially close:
    randomly shift the destination by a growing offset until OTP answers.

    Mutates otp_parameters["toPlace"] and DDBB_params["to_lat"/"to_lon"].
    :return: (response, None) on success, (None, ERROR_EXCEPTION) after
             DEFAULT_MAX_TRIES_TO_RELOCATE_ENDING_actionNT failed tries
    """
    original_place = otp_parameters["toPlace"].split(",")
    shifting = constants.DEFAULT_SHIFTING
    # NOTE(review): this first call's result is discarded — it looks like a
    # leftover; confirm it has no needed side effect before removing.
    utils.get_best_order_plan_detail(otp_parameters, intermediate_nodes)
    for tries in range(0, constants.DEFAULT_MAX_TRIES_TO_RELOCATE_ENDING_actionNT):
        # Shift by up to +-10 steps of the current offset in each axis.
        new_place = [str(float(original_place[0]) + shifting * rnd.randint(-10, 10)),
                     str(float(original_place[1]) + shifting * rnd.randint(-10, 10))]
        otp_parameters["toPlace"] = ",".join([new_place[0], new_place[1]])
        DDBB_params["to_lat"] = new_place[0]
        DDBB_params["to_lon"] = new_place[1]
        # Small pause between retries so the OTP service is not hammered.
        time.sleep(0.01)
        response, errors = utils.get_best_order_plan_detail(otp_parameters, intermediate_nodes)
        # Grow the shift geometrically for the next attempt.
        shifting *= constants.DEFAULT_SHIFTING_AMOUNT
        if response:
            return response, None
    return None, constants.ERROR_EXCEPTION
+
+
def planner_plan_detail(cnx, request):
    """
    Compute the best plan_detail for the requested parameters.

    :param cnx: open MySQL connection
    :param request: Flask request; query args parsed by utils
    :return: plan json dict, or an [error_json, status] pair
    """
    import copy  # stdlib; local import keeps the module header untouched

    # Fixed: the original aliased the shared module-level template
    # (plan_json = constants.plan_JSON_TEMPLATE) and then mutated it, so
    # every request leaked its data into all subsequent responses. A deep
    # copy isolates each request.
    plan_json = copy.deepcopy(constants.plan_JSON_TEMPLATE)
    plan_json["date"] = datetime.date.today().strftime("%Y-%m-%d")

    # Read all parameters to call OTP and to later persist into the DB.
    otp_parameters, DDBB_params, intermediate_nodes, missing_params = utils.get_otp_ddbb_parameters(cnx, request.args)
    if missing_params:
        return [{"error": "Missing or incorrectly formatted params: " + missing_params}, 415]

    response, errors = utils.get_best_order_plan_detail(otp_parameters, intermediate_nodes)
    # OTP refuses origin/destination that are trivially close; retry with a
    # randomly shifted destination.
    if errors and errors["error"]["msg"] == constants.DEFAULT_OTP_TOO_CLOSE:
        response, errors = plan_similar_plan_detail_shifting_end_actionnt(otp_parameters, intermediate_nodes, DDBB_params)

    if errors:
        return constants.ERROR_EXCEPTION

    # Attach the DB parameters so /database/save/plan can persist later.
    response["DDBBParameters"] = DDBB_params
    plan_json["plan_details"][DDBB_params["time_slot"]] = response

    return plan_json
\ No newline at end of file
diff --git a/src/recommender.py b/src/recommender.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c8dc84f15fac3cbd883fb47ccc7afaa6f5a1d5b
--- /dev/null
+++ b/src/recommender.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on: 16/01/2023
+@author: Andoni Aranguren Ubierna
+"""
+import math
+
+import sys
+print(sys.path)
+from src import constants, utils
+
+
def recommender_popularity(cnx, request):
    """
    Recommend actions ranked by popularity.

    Optional query arg ``kpi`` (int) is forwarded to the stored procedure.
    :return: {"actions": [...]} dict, or constants.ERROR_EXCEPTION on failure
    """
    try:
        kpi = request.args.get("kpi", default=None, type=int)
        ret = utils.execute_query(cnx, "CALL recommender_popularity(%s);", [kpi])
        ret_json = {"actions": []}
        for row in ret:
            ret_json["actions"] += [{"action_id": row[0],
                                     "action_name": row[1],
                                     "Longitud": row[2],
                                     "Latitud": row[3],
                                     "kpi_id_list": row[4],
                                     "kpi_name_list": row[5],
                                     "Popularity": row[6]}]
        return ret_json
    # Narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit.
    except Exception:
        return constants.ERROR_EXCEPTION
+
+
def recommender_geographic_id(cnx, request):
    """
    Recommend actions geographically close to the action id in query arg
    ``action`` (docstring corrected from the popularity copy-paste).

    Optional query arg ``kpi`` (int) filters the results.
    :return: {"actions": [...]} dict, a 415 error pair when ``action`` is
             missing, or constants.ERROR_EXCEPTION on failure
    """
    action = request.args.get("action", type=int)
    if action is None:
        return [{"error": "A action id must be provided as an int"}, 415]
    try:
        kpi = request.args.get("kpi", default=None, type=int)
        ret = utils.execute_query(cnx, "CALL recommender_geographic_action_id(%s, %s);", [action, kpi])
        ret_json = {"actions": []}
        for row in ret:
            ret_json["actions"] += [{"action_id": row[0],
                                     "action_name": row[1],
                                     "Longitud": row[2],
                                     "Latitud": row[3],
                                     "kpi_id_list": row[4],
                                     "kpi_name_list": row[5],
                                     # Procedure returns squared distance.
                                     "Distance_in_meters": math.sqrt(row[6])}]
        return ret_json
    # Narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit.
    except Exception:
        return constants.ERROR_EXCEPTION
+
+
def recommender_geographic_lat_lon(cnx, request):
    """
    Recommend actions geographically close to the given ``lat``/``lon``
    (docstring corrected from the popularity copy-paste).

    Optional query arg ``kpi`` (int) filters the results.
    :return: {"actions": [...]} dict, a 415 error pair when lat/lon are
             missing, or constants.ERROR_EXCEPTION on failure
    """
    lat = request.args.get("lat", type=float)
    lon = request.args.get("lon", type=float)
    if lat is None or lon is None:
        return [{"error": "Both lat and lon parameter should be provided as floats"}, 415]
    try:
        kpi = request.args.get("kpi", default=None, type=int)
        ret = utils.execute_query(cnx, "CALL recommender_geographic_lat_lon(%s, %s, %s);", [lat, lon, kpi])
        ret_json = {"actions": []}
        for row in ret:
            ret_json["actions"] += [{"action_id": row[0],
                                     "action_name": row[1],
                                     "Longitud": row[2],
                                     "Latitud": row[3],
                                     "kpi_id_list": row[4],
                                     "kpi_name_list": row[5],
                                     "Distance_squared": row[6]}]
        return ret_json
    # Narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit.
    except Exception:
        return constants.ERROR_EXCEPTION
+
+
def recommender_preferences(cnx, request):
    """
    Recommend actions weighted by the mandatory preference args, optionally
    filtered by ``kpi``.

    :return: {"actions": [...]} dict, the validation errors from
             utils.get_preferences_mandatory, or constants.ERROR_EXCEPTION
    """
    preferences, errors = utils.get_preferences_mandatory(request.args)
    if errors:
        return errors
    params = preferences + [request.args.get("kpi", default=None, type=int)]
    try:
        ret = utils.execute_query(cnx, "CALL recommender_preferences(%s,%s,%s,%s,%s);", params)
        ret_json = {"actions": []}
        for row in ret:
            ret_json["actions"] += [{"action_id": row[0],
                                     "action_name": row[1],
                                     "Longitud": row[2],
                                     "Latitud": row[3],
                                     "kpi_id_list": row[4],
                                     "kpi_name_list": row[5],
                                     "Count": row[6]}]
        return ret_json
    # Narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit.
    except Exception:
        return constants.ERROR_EXCEPTION
+
+
def recommender_kpis_kpi_id(cnx, request):
    """
    Given query arg ``kpi`` (int), return kpis ranked by affinity to it
    (docstring corrected: this variant takes a kpi id, not an action).

    :return: {"kpis": [...]} dict, or constants.ERROR_EXCEPTION on failure
    """
    try:
        kpi = [request.args.get("kpi", type=int)]
        ret = utils.execute_query(cnx, "CALL recommender_kpis_kpi_id(%s);", kpi)
        ret_json = {"kpis": []}
        for row in ret:
            ret_json["kpis"] += [{"kpi_id": row[0],
                                  "kpi_name": row[1],
                                  "Count": row[2],
                                  "Popularity": row[3],
                                  "Probability": row[4]}]
        return ret_json
    # Narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit.
    except Exception:
        return constants.ERROR_EXCEPTION
+
+
def recommender_kpis_action_id(cnx, request):
    """
    Given query arg ``action``, return kpis ranked by affinity to that
    action.

    :return: {"kpis": [...]} dict, or constants.ERROR_EXCEPTION on failure
    """
    try:
        action = [request.args.get("action", type=str)]
        ret = utils.execute_query(cnx, "CALL recommender_kpis_action_id(%s);", action)
        ret_json = {"kpis": []}
        for row in ret:
            ret_json["kpis"] += [{"kpi_id": row[0],
                                  "kpi_name": row[1],
                                  "Count": row[2],
                                  "Popularity": row[3],
                                  "Probability": row[4]}]
        return ret_json
    # Narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit.
    except Exception:
        return constants.ERROR_EXCEPTION
+
+
def recommender_bayesian_action_id(cnx, request):
    """
    Recommend actions via the bayesian stored procedure, given query arg
    ``actions`` (comma-separated ids) and an optional ``kpi`` filter
    (docstring corrected from the popularity copy-paste).

    :return: {"actions": [...]} dict, or constants.ERROR_EXCEPTION on failure
    """
    try:
        actions = request.args.get("actions", type=str)
        kpi = request.args.get("kpi", default=None, type=int)
        ret = utils.execute_query(cnx, "CALL recommender_bayesian_action_id(%s, %s);", [actions, kpi])
        ret_json = {"actions": []}
        for row in ret:
            ret_json["actions"] += [{"action_id": row[0],
                                     "action_name": row[1],
                                     "Longitud": row[2],
                                     "Latitud": row[3],
                                     "kpi_id_list": row[4],
                                     "kpi_name_list": row[5],
                                     "Popularity": row[6]}]
        return ret_json
    # Narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit.
    except Exception:
        return constants.ERROR_EXCEPTION
+    
\ No newline at end of file
diff --git a/src/utils.py b/src/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..6fdce5e8ee9386c4035a38ab8eea7be2cb5633da
--- /dev/null
+++ b/src/utils.py
@@ -0,0 +1,299 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on: 16/01/2023
+@author: Andoni Aranguren Ubierna
+"""
+import sys
+print(sys.path)
+from src import constants
+
+import numpy as np
+from mysql import connector
+import json
+import requests
+import datetime
+import re
+
+
def call_otp(params):
    """
    Perform a GET request against the configured OTP host and return the
    response body parsed as JSON.

    :param params: query-parameter dict for the OTP planner API
    :return: parsed JSON response (dict)
    :raises requests.exceptions.RequestException: on connection/HTTP failure
    """
    # Bug fix: the original caught the *builtin* ConnectionError and re-raised
    # it unchanged; requests raises requests.exceptions.ConnectionError (not a
    # subclass of the builtin), so the handler was dead code and is removed —
    # connection failures propagate to the caller either way.
    response = requests.request("GET", constants.OTP_CONFIG["host"], params=params)
    return json.loads(response.content)
+
+
def execute_query(cnx, query: str, arguments=None, no_return=False):
    """
    Execute *query* on *cnx* with optional *arguments* and return all rows.

    :param cnx: MySQL connection (reconnected locally if it dropped)
    :param query: SQL text with %s placeholders
    :param arguments: sequence of parameters for the placeholders, or None
    :param no_return: when True, execute only and return None (e.g. CALLs
        whose result set is not needed)
    :return: list of row tuples, or None when no_return is True
    :raises Exception: re-raises whatever the driver raised, after logging
    """
    if not cnx.is_connected():
        # NOTE(review): this rebinds the *local* name only — the caller keeps
        # its stale connection object and a fresh connection is opened per
        # call without ever being closed. TODO: hand the new connection back
        # to the caller (or use a pooled connection).
        print("Not connected, connecting...")
        cnx = connector.connect(**constants.DDBB_CONFIG)
    print(f"Executing query {query} with params {arguments}...")
    cursor = cnx.cursor(buffered=True)
    try:
        cursor.execute(query, arguments)
        if no_return:
            return None
        return cursor.fetchall()
    except Exception as e:
        print(f"An exception has ocurred: {e}")
        raise
    finally:
        # Bug fix: the original leaked the cursor on the no_return path
        # (it returned before cursor.close()); close on every exit instead.
        cursor.close()
+
+
def get_time_slot_default_start_time(time_slot: int):
    """Default departure time string for the given time slot."""
    slots = constants.DEFAULT_TIME_SLOTS
    return slots[time_slot]
+
+
def get_time_slot_default_arrive_by(time_slot: int):
    """Default arrive-by time string: the boundary of the next time slot."""
    slots = constants.DEFAULT_TIME_SLOTS
    return slots[time_slot + 1]
+
+
def get_actions_from_str(intermediate_nodes_ids_str):
    """
    Parse a comma-separated id string ("3,1,2") into a sorted list of ints,
    skipping empty tokens (so trailing/duplicated commas are tolerated).
    """
    tokens = intermediate_nodes_ids_str.split(",")
    return sorted(int(token) for token in tokens if token)
+
+
def simplify_by_distance(intermediateNodes, min_dist=250):
    """
    Merge clusters of intermediate nodes that lie closer together than
    *min_dist* meters, because OTP does not plan through way-points that are
    too close to each other. Recurses until all cluster centroids are at
    least *min_dist* apart.

    :param intermediateNodes: list of clusters; each cluster is a list of
        rows whose indices 1 and 2 hold the two coordinate components
        (initially one action row per cluster)
    :param min_dist: merge threshold in meters (default 250; new parameter,
        backward-compatible generalization of the previously hard-coded value)
    :return: list of clusters whose centroids are pairwise >= min_dist apart
    """
    # Approximate degrees->meters conversion factor used by the original code.
    rads_2_meters = 1 / 0.000008998719243599958
    if len(intermediateNodes) == 1:
        return intermediateNodes

    # Centroid of each cluster, from coordinate indices 1 and 2 of each row.
    cluster_x = [float(np.mean([row[1] for row in cluster])) for cluster in intermediateNodes]
    cluster_y = [float(np.mean([row[2] for row in cluster])) for cluster in intermediateNodes]

    for i in range(len(intermediateNodes)):
        for j in range(i + 1, len(intermediateNodes)):
            dist = np.sqrt(pow(cluster_x[i] - cluster_x[j], 2) + pow(cluster_y[i] - cluster_y[j], 2)) * rads_2_meters
            if dist <= min_dist:
                # Merge clusters i and j into one and recurse until stable.
                merged = [node for node in intermediateNodes[i]] + [node for node in intermediateNodes[j]]
                remaining = [c for k, c in enumerate(intermediateNodes) if k != i and k != j]
                return simplify_by_distance(remaining + [merged], min_dist)
    return intermediateNodes
+
+
def get_intermediate_nodes(cnx, intermediate_nodes_ids_str):
    """
    Resolve a comma-separated string of action ids into distance-simplified
    clusters plus their OTP "LatLng(..)" centroid strings.

    :param cnx: open MySQL connection
    :param intermediate_nodes_ids_str: raw "id,id,..." string (may be falsy)
    :return: (clusters, otp_latlng_strings, errors) — errors is None on
        success, or an error message when some ids are unknown
    """
    intermediate_nodes_simplify, intermediate_nodes_simplify_otp = [], []
    if intermediate_nodes_ids_str:
        intermediate_nodes_ids = get_actions_from_str(intermediate_nodes_ids_str)
        # Bug fix: the raw request string was interpolated straight into the
        # SQL text; use a parameterized IN clause instead (the value is
        # regex-checked upstream, but should still never reach the query
        # unescaped).
        placeholders = ",".join(["%s"] * len(intermediate_nodes_ids))
        intermediate_nodes_latlng = execute_query(
            cnx,
            f"SELECT action_id, lat, lon FROM action WHERE action_id in ({placeholders})",
            intermediate_nodes_ids)
        if len(intermediate_nodes_ids) != len(intermediate_nodes_latlng):
            found_ids = [x[0] for x in intermediate_nodes_latlng]
            missing_ids = [x for x in intermediate_nodes_ids if x not in found_ids]
            errors = f"Some ids ({str(missing_ids)}) of intermediateNodesIds are not in the database"
            return None, None, errors

        # NOTE(review): the original also built an action_id -> "LatLng(lon,lat)"
        # dict here that was never read (and whose coordinate order disagreed
        # with the centroid strings below); removed as dead code.
        intermediate_nodes_simplify = simplify_by_distance([[row] for row in intermediate_nodes_latlng])
        for cluster in intermediate_nodes_simplify:
            mean_x = float(np.mean([row[1] for row in cluster]))
            mean_y = float(np.mean([row[2] for row in cluster]))
            intermediate_nodes_simplify_otp += [f"LatLng({str(mean_x)},{str(mean_y)})"]
    return intermediate_nodes_simplify, intermediate_nodes_simplify_otp, None
+
+
def get_mandatory_param(args, param, value_type, missing_params_p, regexp=None):
    """
    Fetch a mandatory request parameter, converting it to *value_type* and —
    for string parameters — validating it against *regexp*.

    NOTE(review): *missing_params_p* is only ever rebound locally; Python
    strings are immutable, so callers never see the accumulated message and
    must rely on the None return value instead. TODO: return the message
    explicitly instead of emulating an out-parameter.

    :param args: request args mapping (Flask request.args style)
    :param param: parameter name
    :param value_type: conversion type passed to args.get
    :param missing_params_p: (broken) accumulator for missing-parameter text
    :param regexp: optional regex a string value must match
    :return: the converted/validated value, or None when missing or invalid
    """
    if missing_params_p is None:
        missing_params_p = ""
    value = None
    try:
        value = args.get(param, type=value_type)
        # Bug fix: the original tested `type is str` — the *builtin* `type`
        # compared to str, which is always False — so the regexp validation
        # never ran. Compare the value_type argument instead.
        if value_type is str and regexp is not None:
            # re.search(...) is None on mismatch; indexing it raises and the
            # handler below turns that into the "missing/invalid" result.
            value = re.search(regexp, value)[0]
    except Exception:
        value = None

    if value is None:
        text = param + ("" if regexp is None else " (regex is '" + regexp + "')")
        missing_params_p += ", " + text

    return value
+
+
def get_optional_param(args, param, badly_formatted_params_p, regexp=None):
    """
    Fetch an optional string request parameter: None when absent, otherwise
    delegate to get_mandatory_param for conversion and regexp validation.

    :param args: request args mapping (Flask request.args style)
    :param param: parameter name
    :param badly_formatted_params_p: accumulator forwarded to
        get_mandatory_param (see the note there — it never reaches the caller)
    :param regexp: optional regex the value must match
    :return: validated string value, or None when absent/invalid
    """
    # Bug fix: the original passed the *builtin* `type` as the converter
    # (args.get(param, type=type)), turning every raw value into its class.
    # It only "worked" because any class object is non-None; read the raw
    # value as a string, which is what the mandatory path expects.
    value = args.get(param, type=str)
    if value is not None:
        return get_mandatory_param(args, param, str, badly_formatted_params_p, regexp)
    return None
+
+
def get_coord_attribute(args, atribute_st, missing_params):
    """Fetch a mandatory coordinate parameter validated against the shared coords regexp."""
    coords_regexp = constants.REGEXP_COORDS
    return get_mandatory_param(args, atribute_st, str, missing_params, coords_regexp)
+
+
def get_otp_ddbb_parameters(cnx, args):
    """
    Build both the OTP API request parameters and the database insert
    parameters from the incoming request args.

    :param cnx: open MySQL connection (used to resolve intermediate node ids)
    :param args: request args mapping (Flask request.args style)
    :return: (otp_parameters, DDBB_params, intermediate_nodes, errors) —
        the first three are None whenever the fourth is truthy
    """
    # ===== Mandatory arguments
    missing_params = None
    # NOTE(review): get_mandatory_param/get_coord_attribute cannot update
    # `missing_params` through this immutable string argument, so the
    # `if missing_params:` check below only fires via the explicit
    # reassignment after get_preferences_mandatory — confirm and rework
    # the error accumulation so missing mandatory params are reported.
    time_slot = get_mandatory_param(args, "timeSlot", int, missing_params)
    from_place = get_coord_attribute(args, "fromPlace", missing_params)
    to_place = get_coord_attribute(args, "toPlace", missing_params)
    preferences, errors = get_preferences_mandatory(args)
    if errors:
        # errors is [json_dict, status_code]; fold its message into the text.
        missing_params = ("" if missing_params is None else str(missing_params) + ", ") + errors[0]["error"]

    if missing_params:
        return None, None, None, missing_params

    # ===== Not mandatory
    badly_formatted_opt_params = None  # NOTE(review): same out-param issue as above
    intermediate_nodes_ids_str = get_optional_param(args, "intermediateNodesIds", badly_formatted_opt_params,
                                                    constants.REGEXP_INTERMEDIATE_NODES)
    intermediate_nodes, intermediate_nodes_otp, errors = get_intermediate_nodes(cnx, intermediate_nodes_ids_str)

    if errors:
        return None, None, None, errors

    arrive_by_bool = args.get("arriveBy", default=False, type=bool)
    wheelchair = args.get("wheelchair", default=False, type=bool)
    max_walk_distance = args.get("maxWalkDistance", default=None, type=int)

    time_str = get_optional_param(args, "time", badly_formatted_opt_params, constants.REGEXP_TIME)
    if time_str is None:
        # Fall back to the slot's default boundary: end of slot when planning
        # arrive-by, start of slot otherwise.
        time_str = (get_time_slot_default_arrive_by(time_slot) if arrive_by_bool else
                    get_time_slot_default_start_time(time_slot))

    if badly_formatted_opt_params:
        return None, None, None, badly_formatted_opt_params

    time = datetime.datetime.strptime(time_str, "%H:%M")

    # ===== Generates the OTP API parameter dictionary
    # NOTE(review): %#H is Windows-only strftime syntax (see inline comments);
    # this module will raise/misformat on other platforms — confirm deployment OS.
    otp_parameters = {
        'fromPlace': from_place,
        'toPlace': to_place,
        'time': time.strftime("%#H:%M") + time.strftime("%p"),  # Change %#H:%M to %-H:%M if in linux
        'mode': "WALK",
        'date': datetime.date.today().strftime("%m-%d-%Y"),
        'locale': 'es',
        'arriveBy': arrive_by_bool,
        'wheelchair': wheelchair
    }

    if intermediate_nodes_otp is not None:
        otp_parameters['intermediateNodes'] = intermediate_nodes_otp
    if max_walk_distance is not None:
        otp_parameters['maxWalkDistance'] = max_walk_distance

    # ===== Generate a DDBB parameter dictionary
    DDBB_params = {
        "plan_id": None,  # filled in later, once the plan row exists
        "time_slot": time_slot,
        "fecha_hora": time.strftime("%#H:%M:%S"),  # Change %#H:%M to %-H:%M if in linux
        "arrive_by": int(otp_parameters["arriveBy"]),
        "wheelchair": int(otp_parameters["wheelchair"]),
        "max_walk_distance": max_walk_distance,
        "from_lat": float(otp_parameters["fromPlace"].split(",")[0]),
        "from_lon": float(otp_parameters["fromPlace"].split(",")[1]),
        "to_lat": float(otp_parameters["toPlace"].split(",")[0]),
        "to_lon": float(otp_parameters["toPlace"].split(",")[1]),
    }
    # One DB column per preference weight, in the shared declaration order.
    for pref_ind in range(len(constants.DEFAULT_PREFERENCES)):
        DDBB_params[constants.DEFAULT_PREFERENCES[pref_ind]] = preferences[pref_ind]

    return otp_parameters, DDBB_params, intermediate_nodes, missing_params
+
+
def unclusterize_best_order(response, intermediate_nodes):
    """
    Expand OTP's cluster-level "bestOrder" (e.g. "[1,0]") back into the
    comma-separated list of original action ids, rewriting it in place on
    response["requestParameters"] and returning the resulting string.
    Sets it to "" when there are no intermediate nodes or no bestOrder.
    """
    params = response["requestParameters"]
    if intermediate_nodes is None or "bestOrder" not in params:
        params["bestOrder"] = ""
        return ""

    cluster_indices = params["bestOrder"][1:-1].split(",")
    action_ids = [
        str(node[0])
        for idx in cluster_indices
        for node in intermediate_nodes[int(idx)]
    ]
    params["bestOrder"] = ",".join(action_ids)
    return params["bestOrder"]
+
+
def get_best_order_plan_detail(otp_parameters_p: dict, intermediate_nodes):
    """
    Call OTP (without a pre-set bestOrder, so it computes the best visiting
    order itself), expand the returned bestOrder back to action ids, and
    validate the walk distance against the requested maximum.

    :param otp_parameters_p: OTP request parameters; not mutated (copied)
    :param intermediate_nodes: clusters used to expand OTP's bestOrder
    :return: (response, None) on success; (None, errors) when OTP reports an
        error or the max walk distance is exceeded
    """
    otp_parameters = otp_parameters_p.copy()

    # A pre-existing bestOrder would pin the visiting order; drop it so OTP
    # is free to compute the optimal one.
    if "bestOrder" in otp_parameters:
        otp_parameters.pop("bestOrder")

    response = call_otp(otp_parameters)

    if "error" in response:
        return None, response

    # Rewrites response["requestParameters"]["bestOrder"] in place.
    unclusterize_best_order(response, intermediate_nodes)
    walk_distance_meters = response["plan"]["itineraries"][0]["walkDistance"]

    errors = {}
    if "maxWalkDistance" in otp_parameters and int(walk_distance_meters) > otp_parameters["maxWalkDistance"]:
        # Bug fix: typo in the user-facing message ("exceded" -> "exceeded").
        errors["walkDistance"] = {"msg": "Max walk distance exceeded",
                                  "amount": int(walk_distance_meters) - otp_parameters["maxWalkDistance"]}
    if errors:
        return None, errors
    else:
        return response, None
+
+
def get_preferences_mandatory(args):
    """
    Read the preference weights (one request arg per name in
    constants.DEFAULT_PREFERENCES, defaulting to 0) and require them to sum
    to 1.

    :param args: request args mapping (Flask request.args style)
    :return: (weights list, None) on success, or
        (None, [error_json_dict, 415]) when the weights do not sum to 1
    """
    import math  # local import: keeps the module-level import block untouched

    params = [args.get(pref, default=0, type=float) for pref in constants.DEFAULT_PREFERENCES]
    # Bug fix: the original used exact float equality (sum(params) != 1),
    # which rejected valid inputs such as ten 0.1 weights due to rounding;
    # compare with a tolerance instead.
    if not math.isclose(sum(params), 1.0, rel_tol=1e-9, abs_tol=1e-9):
        return None, [{"error": f"Preferences should add up to 1 {constants.DEFAULT_PREFERENCES}"}, 415]
    # NOTE(review): the original wrapped the plain `return params` in a
    # try/except that could never fire (parsing already happened above);
    # removed as dead code.
    return params, None
+
+
def get_preferences_optional(args):
    """
    Like get_preferences_mandatory, but when the request carries no
    preference parameters at all, return (None, None) instead of an error.
    """
    has_preferences = sum([param in constants.DEFAULT_PREFERENCES for param in args.keys()]) != 0
    if not has_preferences:
        # No preferences, just return it empty
        return None, None

    # There are preferences: delegate validation to the mandatory path.
    return get_preferences_mandatory(args)
+
+
def generate_plan(cnx, date):
    """
    Insert a new plan row for *date* via the insert_plan stored procedure and
    return its generated id.

    :param cnx: open MySQL connection
    :param date: date value forwarded to the stored procedure
    :return: (plan_id, None) on success, or (None, raw_response) when the
        procedure returned nothing indexable
    """
    response = execute_query(cnx, "CALL insert_plan(%s);", [date])
    try:
        return response[0][0], None
    # Bug fix: narrowed from a bare `except:` to the failures the indexing
    # can actually raise, so unrelated errors are no longer swallowed.
    except (IndexError, TypeError):
        return None, response
+
+
def check_valid_action_ids(cnx, action_id_list):
    """
    Return True when every id in *action_id_list* exists in the action table.
    An empty list is trivially valid.
    """
    if not action_id_list:
        return True
    placeholders = ",".join("%s" for _ in action_id_list)
    query = f"SELECT p.action_id FROM action p WHERE p.action_id in ({placeholders});"
    matching_rows = execute_query(cnx, query, action_id_list)
    return len(matching_rows) == len(action_id_list)