diff --git a/ddbb scripts/.gitkeep b/ddbb scripts/.gitkeep
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/recommender_bayesian.sql b/ddbb scripts/recommender_bayesian.sql
similarity index 100%
rename from recommender_bayesian.sql
rename to ddbb scripts/recommender_bayesian.sql
diff --git a/recommender_geographic.sql b/ddbb scripts/recommender_geographic.sql
similarity index 100%
rename from recommender_geographic.sql
rename to ddbb scripts/recommender_geographic.sql
diff --git a/recommender_popularity.sql b/ddbb scripts/recommender_popularity.sql
similarity index 100%
rename from recommender_popularity.sql
rename to ddbb scripts/recommender_popularity.sql
diff --git a/recommender_preferences.sql b/ddbb scripts/recommender_preferences.sql
similarity index 100%
rename from recommender_preferences.sql
rename to ddbb scripts/recommender_preferences.sql
diff --git a/recommender_tags.sql b/ddbb scripts/recommender_tags.sql
similarity index 100%
rename from recommender_tags.sql
rename to ddbb scripts/recommender_tags.sql
diff --git a/ejemplo.txt b/ejemplo.txt
deleted file mode 100644
index 528bea8aef078ee1429772d78d8239ecc2973d1f..0000000000000000000000000000000000000000
--- a/ejemplo.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-prueba1
-prueba2
-prueba3
diff --git a/kpis.json b/extra_data/kpis.json
similarity index 100%
rename from kpis.json
rename to extra_data/kpis.json
diff --git a/kpis_area.json b/extra_data/kpis_area.json
similarity index 100%
rename from kpis_area.json
rename to extra_data/kpis_area.json
diff --git a/src/app.py b/src/app.py
index d3a68904124c656252b3230bafc0b2ab2afe29ba..5b04d7ca4733f237d98c5d93a87d6182debc9147 100644
--- a/src/app.py
+++ b/src/app.py
@@ -101,6 +101,17 @@ def database_save_plan():
     else:
         return constants.ERROR_JSON_NEEDED
 
+@app.get("/database/store/kpi")
+def database_store_kpi():
+    """
+    Stores the KPIs that have been selected.
+    The JSON provided should have the same format as the KPI JSON files.
+    """
+    if request.args:
+        return database.database_store_kpi(cnx, request)
+    else:
+        return constants.ERROR_JSON_NEEDED
 
 # plan_detail PLANNER: -------------------------------------------------------------------------------------------------------
 @app.get("/planner/plan_detail")
diff --git a/src/constants.py b/src/constants.py
index 2a705001903c7c08e4d979fc7b6b61ac16a60531..0bb333dc9882eccfd793c39477856740aaf1fe9e 100644
--- a/src/constants.py
+++ b/src/constants.py
@@ -40,6 +40,7 @@ plan_JSON_TEMPLATE = dict(date=None, plan_details={0: {},  # Morning plan_detail
 # Load data
 PATH_plan_detailS = "data/Rutas"
 PATH_actionS = "data/actions"
+PATH_extra = "../extra_data"
 
 # Load data - SQL queries
 sql_insert_action = """
@@ -51,12 +52,6 @@ sql_insert_kpi = """INSERT IGNORE INTO kpi(kpi_name) VALUES(%s)"""
 
 sql_get_kpi_ids = """SELECT kpi_name, kpi_id FROM kpi WHERE kpi_name IN ('{names}')"""
 
-sql_insert_action_kpi = """
-INSERT IGNORE INTO kpi_action(action_id, kpi_id)
-VALUES ( (SELECT action_id FROM action WHERE lat = %s AND lon = %s AND action_name = %s),
-         {kpi})
-"""
-
 sql_insert_plan = """
 INSERT IGNORE INTO plan(plan_date)
 VALUES(%s);
@@ -78,3 +73,24 @@ INSERT IGNORE INTO CHOSEN_action(plan_id, time_slot, action_id)
 VALUES((SELECT plan_id FROM plan WHERE plan_date = '{date}'), %s, %s)
 """
 
+
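+# Extended KPI insert; this overrides the simpler sql_insert_kpi defined earlier in this file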
+sql_insert_kpi = """
+INSERT IGNORE INTO kpi (kpi_name, kpi_full_name, parent_id, kpi_level, popularity, use_case_id)
+VALUES(%s, %s, %s, %s, %s, %s)
+"""
+
+sql_get_kpis_data = """SELECT id, kpi_name, kpi_full_name, parent_id, kpi_level, popularity, use_case_id FROM kpi"""
+sql_get_kpi_from_case_ids = """SELECT id, kpi_name, kpi_full_name, parent_id, kpi_level, popularity, use_case_id FROM kpi WHERE use_case_id IN (%s)"""
+
+sql_action_kpi = """SELECT id, action_id, kpi_id, kpi_name, use_case_id, absolute FROM kpi_action WHERE kpi_id = %s"""
+
+sql_insert_action_kpi = """
+INSERT IGNORE INTO kpi_action(action_id, kpi_id, kpi_name, use_case_id, absolute, relative, geographical, zoneId)
+VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
+"""
+
+sql_update_action_kpi = """
+UPDATE kpi_action 
+SET action_id = %s, kpi_id = %s, kpi_name = %s, use_case_id = %s, absolute = %s, relative = %s, geographical = %s, zoneId = %s
+WHERE kpi_id = %s
+"""
diff --git a/src/database.py b/src/database.py
index c62049814bd78e97a6c51bcc300bb6980e89b63f..0d304f01915a580351f13a92abd9857edfa5cedb 100644
--- a/src/database.py
+++ b/src/database.py
@@ -7,6 +7,7 @@ Created on: 16/01/2023
 
 """
 from src import utils, constants
+import json
 
 
 def __database_save_plan_detail__(cnx, otp_parameters, DDBB_params, intermediate_nodes):
@@ -63,3 +64,128 @@ def database_save_plan(cnx, request):
                                     plan_detail["DDBBParameters"],
                                     intermediate_nodes)
     return constants.ALL_RIGHT
+
+def database_store_kpi(cnx, request):
+    """
+    Stores KPI data from the selected JSON file in the database
+    :return: constants.ALL_RIGHT once the KPIs have been stored
+    """
+    cursor = cnx.cursor(buffered=True)
+    # The query-string key names the JSON file to load from the extra_data directory
+    data = list(request.args.to_dict().keys())
+    with open("%s/%s" % (constants.PATH_extra, data[0]), "r") as kpi_file:
+        parsed_json = json.load(kpi_file)
+    keys_1_level = parsed_json.keys()
+    kpis_bd_bio_data = {}
+    pos = 0
+    for key in keys_1_level:
+        if key == 'bilbao':
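+            # Cache the KPIs already stored for the 'BIO' use case, keyed by position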
+            cursor.execute(constants.sql_get_kpi_from_case_ids, ['BIO'])
+            ret = cursor.fetchall()
+            for row in ret:
+                kpis_bd_bio_data[pos] = {
+                    'id': row[0],
+                    'kpi_name': row[1],
+                    'kpi_full_name': row[2],
+                    'parent_id': row[3],
+                    'kpi_level': row[4],
+                    'popularity': row[5],
+                    'use_case_id': row[6],
+                }
+                pos += 1
+    if 'bilbao' in parsed_json:
+        load_json_kpi_data(cnx, parsed_json['bilbao'], 0, kpis_bd_bio_data, 'BIO')
+    return constants.ALL_RIGHT
+
+def kpi_by_name_level(cnx, kpi_name, kpi_level, case_id, parent_id=None):
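+    """
+    Returns the KPI row matching name/level/parent for the given use case,
+    inserting a new KPI first if it does not exist yet.
+    """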
+    cursor = cnx.cursor(buffered=True)
+    cursor.execute(constants.sql_get_kpi_from_case_ids, [case_id])
+    ret = cursor.fetchall()
+    kpis_dict = {}
+    for row in ret:
+        kpis_dict = {
+            'id': row[0],
+            'kpi_name': row[1],
+            'kpi_full_name': row[2],
+            'parent_id': row[3],
+            'kpi_level': row[4],
+            'popularity': row[5],
+            'use_case_id': row[6],
+            }
+        if kpis_dict['kpi_name'] == kpi_name and kpis_dict['kpi_level'] == kpi_level and kpis_dict['parent_id'] == parent_id:
+            return kpis_dict
+    kpi_full_name = kpi_name
+    if parent_id:
+        for row in ret:
+            if row[0] == parent_id:
+                kpi_full_name = '%s \\ %s' % (row[2], kpi_name)
+                break
+    kpis_dict = {
+        'kpi_name': kpi_name,
+        'kpi_full_name': kpi_full_name,
+        'parent_id': parent_id,
+        'kpi_level': kpi_level,
+        'popularity': 0,
+        'use_case_id': case_id,
+    }
+    cursor.execute(constants.sql_insert_kpi, [kpi_name, kpi_full_name, parent_id, kpi_level, 0, case_id])
+    cnx.commit()
+    kpis_dict['id'] = cursor.lastrowid
+    return kpis_dict
+
+def json_key_level(json, key, level=0):
+    # Returns the level of the first matching key found, or None if the key is absent
+    for json_key in json.keys():
+        if key == json_key:
+            return level
+        if isinstance(json[json_key], dict):
+            found = json_key_level(json[json_key], key, level+1)
+            if found is not None:
+                return found
+    return None
+
+def get_json_key_bd_data(key, level, kpis_bd_bio_data, parent_id):
+    '''
+    :param key: JSON key to search for
+    :param level: Key level
+    :param kpis_bd_bio_data: KPI data already stored in the database
+    :param parent_id: Parent KPI id (None for top-level keys)
+    :return: Key data dict from the database, or {}
+    '''
+    for bd_pos in kpis_bd_bio_data:
+        if kpis_bd_bio_data[bd_pos]['kpi_name'] == key and kpis_bd_bio_data[bd_pos]['kpi_level'] == level and kpis_bd_bio_data[bd_pos]['parent_id'] == parent_id:
+            return kpis_bd_bio_data[bd_pos]
+    return {}
+
+def get_kpi_data_by_id(kpi_id, kpis_bd_bio_data):
+    '''
+    :param kpi_id: KPI id in the database
+    :param kpis_bd_bio_data: KPI data already stored in the database
+    :return: Key data dict from the database, or {}
+    '''
+    for bd_pos in kpis_bd_bio_data:
+        if kpis_bd_bio_data[bd_pos]['id'] == kpi_id:
+            return kpis_bd_bio_data[bd_pos]
+    return {}
+
+def update_kpi_action(cnx, value, kpi_name, case_id, parent_id):
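+    """
+    Inserts or updates the kpi_action row for the given KPI,
+    storing the JSON leaf value in the absolute column.
+    """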
+    cursor = cnx.cursor(buffered=True)
+    cursor.execute(constants.sql_action_kpi, [parent_id])
+    ret = cursor.fetchall()
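+    # Note: action_id is currently hardcoded to 4 in both the update and the insert below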
+    if ret:
+        cursor.execute(constants.sql_update_action_kpi, [4, parent_id, kpi_name, case_id, value, None, None, None, parent_id])
+    else:
+        cursor.execute(constants.sql_insert_action_kpi, [4, parent_id, kpi_name, case_id, value, None, None, None])
+    cnx.commit()
+
+def load_json_kpi_data(cnx, json, level, kpis_bd_bio_data, case_id, parent_id=None):
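+    # Walks the KPI JSON tree depth-first: dict values are nested KPI groups (created in the
+    # kpi table if missing), leaf values are stored as kpi_action entries under their parent KPI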
+    if isinstance(json, dict):
+        for json_key in json.keys():
+            key_data = get_json_key_bd_data(json_key, level, kpis_bd_bio_data, parent_id)
+            if not key_data:
+                key_data = kpi_by_name_level(cnx, json_key, level, case_id, parent_id)
+            load_json_kpi_data(cnx, json[json_key], level+1, kpis_bd_bio_data, case_id, key_data['id'])
+    else:
+        parent_dict = get_kpi_data_by_id(parent_id, kpis_bd_bio_data)
+        if parent_dict:
+            update_kpi_action(cnx, json, parent_dict['kpi_name'], case_id, parent_dict['id'])