#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on: 16/01/2023
@author: Andoni Aranguren Ubierna
@updates: Sergio Campos 02-03/2023
"""
from src import utils, constants
import json


def __database_save_plan_detail__(cnx, otp_parameters, DDBB_params, intermediate_nodes):
    if intermediate_nodes:
        response, errors = utils.get_best_order_plan_detail(otp_parameters, intermediate_nodes)
        if errors:
            return errors
        # Guard before dereferencing the OTP response.
        if response is not None:
            DDBB_params["action_id_array"] = response["requestParameters"]["bestOrder"]
            query_params = [DDBB_params[x] for x in constants.DEFAULT_INSERT_plan_detail_ORDER]
            s_parameters_str = ', '.join(['%s'] * len(query_params))
            utils.execute_query(cnx, f"CALL insert_plan_detail({s_parameters_str});",
                                query_params, no_return=True)


def database_save_plan(cnx, request):
    """
    Stores the plan_details that have been selected.
    :return: json with the plan_detail
    """
    plan = request.json
    date = plan["date"]
    if date is None:
        return constants.ERROR_plan_DATE_MISSING

    # Check that every referenced action ID exists in the database.
    action_id = []
    for plan_details in [x for x in plan["plan_details"].values() if x]:
        action_id += plan_details["requestParameters"]["bestOrder"].split(",")
    action_id = list({int(d): d for d in action_id if d}.values())  # Remove duplicates
    if not utils.check_valid_action_ids(cnx, action_id):
        return constants.ERROR_INVALID_action_ID

    plan_id, error = utils.generate_plan(cnx, date)
    if error:
        return constants.ERROR_plan_DATE_MISSING

    # Query OTP again for each time slot to recompute the best order.
    for time_slot in plan["plan_details"].keys():
        plan_detail = plan["plan_details"][time_slot]
        if plan_detail:
            plan_detail["requestParameters"]["intermediateNodes"] = None
            intermediate_nodes = None
            if "bestOrder" in plan_detail["requestParameters"]:
                intermediate_nodes_ids_str = plan_detail["requestParameters"]["bestOrder"]
                intermediate_nodes, intermediate_nodes_otp = utils.get_intermediate_nodes(
                    cnx, intermediate_nodes_ids_str)
                plan_detail["requestParameters"]["intermediateNodes"] = intermediate_nodes_otp
            plan_detail["DDBBParameters"]["plan_id"] = plan_id
            __database_save_plan_detail__(cnx, plan_detail["requestParameters"],
                                          plan_detail["DDBBParameters"], intermediate_nodes)
    return constants.ALL_RIGHT


def database_store_kpi(cnx, request, relative=False, area=False):
    """
    Stores the KPI data from the selected JSON file in the database.
    :return:
    """
    cursor = cnx.cursor(buffered=True)
    dic = request.args.to_dict()
    data = list(dic.keys())
    with open("extra_data/%s" % data[0], "r") as file1:
        file_contents = file1.read()
    parsed_json = json.loads(file_contents)
    keys_1_level = parsed_json.keys()
    kpis_bd_bio_data = {}
    pos = 0
    for key in keys_1_level:
        if area and key == 'area':
            area = parsed_json['area']
        if key == 'bilbao':
            cursor.execute(constants.sql_get_kpi_from_case_ids, ['BIO'])
            ret = cursor.fetchall()
            for row in ret:
                kpis_bd_bio_data[pos] = {
                    'id': row[0],
                    'kpi_name': row[1],
                    'kpi_full_name': row[2],
                    'parent_id': row[3],
                    'kpi_level': row[4],
                    'popularity': row[5],
                    'use_case_id': row[6],
                }
                pos += 1
            load_json_kpi_data(cnx, parsed_json['bilbao'], 0, kpis_bd_bio_data, 'BIO',
                               relative=relative, area=area)
    return constants.ALL_RIGHT
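
# Illustrative sketch of the request payload that database_save_plan expects, inferred
# from the keys the function reads above; the slot names and values are made-up examples,
# not part of any documented API contract.
#
# {
#     "date": "2023-03-01",
#     "plan_details": {
#         "morning": {
#             "requestParameters": {"bestOrder": "12,7,3"},   # comma-separated action IDs
#             "DDBBParameters": {...}                          # columns for insert_plan_detail
#         },
#         "afternoon": null
#     }
# }
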
def kpi_by_name_level(cnx, kpi_name, kpi_level, case_id, parent_id=None):
    cursor = cnx.cursor(buffered=True)
    cursor.execute(constants.sql_get_kpi_from_case_ids, [case_id])
    ret = cursor.fetchall()
    # Return the existing KPI if one matches name, level and parent.
    for row in ret:
        kpis_dict = {
            'id': row[0],
            'kpi_name': row[1],
            'kpi_full_name': row[2],
            'parent_id': row[3],
            'kpi_level': row[4],
            'popularity': row[5],
            'use_case_id': row[6],
        }
        if (kpi_name in kpis_dict.values()
                and kpis_dict['kpi_level'] == kpi_level
                and kpis_dict['parent_id'] == parent_id):
            return kpis_dict

    # Otherwise insert a new KPI, prefixing the parent's full name when available.
    kpi_full_name = kpi_name
    if parent_id:
        for row in ret:
            if row[0] == parent_id:
                kpi_full_name = '%s \\ %s' % (row[2], kpi_name)
                break
    kpis_dict = {
        'kpi_name': kpi_name,
        'kpi_full_name': kpi_full_name,
        'parent_id': parent_id,
        'kpi_level': kpi_level,
        'popularity': 0,
        'use_case_id': case_id,  # mirrors the value used in the INSERT below
    }
    cursor.execute(constants.sql_insert_kpi, [kpi_name, kpi_full_name, parent_id, kpi_level, 0, case_id])
    cnx.commit()
    kpis_dict['id'] = cursor.lastrowid
    return kpis_dict


def json_key_level(json, key, level=0):
    # Returns the level of the first occurrence of `key` in the nested dict, or None.
    for json_key in json.keys():
        if key == json_key:
            return level
        if isinstance(json[json_key], dict):
            found = json_key_level(json[json_key], key, level + 1)
            if found is not None:
                return found
    return None


def get_json_key_bd_data(key, level, kpis_bd_bio_data, parent_id):
    '''
    :param key: JSON key to search for
    :param level: Key level
    :param kpis_bd_bio_data: KPI rows previously loaded from the database
    :param parent_id: Parent KPI id
    :return: Key data dict in BD or {}
    '''
    for bd_pos in kpis_bd_bio_data:
        if (kpis_bd_bio_data[bd_pos]['kpi_name'] == key
                and kpis_bd_bio_data[bd_pos]['kpi_level'] == level
                and kpis_bd_bio_data[bd_pos]['parent_id'] == parent_id):
            return kpis_bd_bio_data[bd_pos]
    return {}


def get_kpi_data_by_id(kpi_id, kpis_bd_bio_data):
    '''
    :param kpi_id: KPI id in BD
    :param kpis_bd_bio_data: KPI rows previously loaded from the database
    :return: Key data dict in BD or {}
    '''
    for bd_pos in kpis_bd_bio_data:
        if kpis_bd_bio_data[bd_pos]['id'] == kpi_id:
            return kpis_bd_bio_data[bd_pos]
    return {}


def update_kpi_action(cnx, value, kpi_name, case_id, parent_id):
    cursor = cnx.cursor(buffered=True)
    cursor.execute(constants.sql_action_kpi, [parent_id])
    ret = cursor.fetchall()
    if ret:
        cursor.execute(constants.sql_update_action_kpi,
                       [4, parent_id, kpi_name, case_id, value, None, None, None, parent_id])
    else:
        cursor.execute(constants.sql_insert_action_kpi,
                       [4, parent_id, kpi_name, case_id, value, None, None, None])
    cnx.commit()


def get_action_kpi(cnx, action_kpi_id, zone_id=False):
    cursor = cnx.cursor(buffered=True)
    if zone_id:
        cursor.execute(constants.sql_action_kpi_zone, [action_kpi_id, zone_id])
    else:
        cursor.execute(constants.sql_action_kpi, [action_kpi_id])
    ret = cursor.fetchall()
    res_dict = {}
    if ret and len(ret) == 1:
        result = ret[0]
        res_dict = {
            'action_id': result[0],
            'kpi_id': result[1],
            'kpi_name': result[2],
            'use_case_id': result[3],
            'absolute': result[4],
            # 'relative': result[6],
            # 'geographical': result[7],
            # 'zone_id': result[8],
        }
    return res_dict
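
# Worked example (numbers are made up) of the relative value computed by the two
# update_kpi_action_relative* helpers below: with a stored absolute of 120.0 and an
# incoming value of 100.0,
#     relative_data = (120.0 - 100.0) / (100.0 * 100) = 0.002
# i.e. the absolute difference divided by one hundred times the new value.
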
def update_kpi_action_relative(cnx, value, kpi_name, case_id, relative, parent_id):
    cursor = cnx.cursor(buffered=True)
    kpi_action = get_action_kpi(cnx, parent_id)
    if kpi_action and value != 0:
        relative_data = (kpi_action['absolute'] - value) / (value * 100)
        cursor.execute(constants.sql_update_action_kpi,
                       [14, parent_id, kpi_name, case_id, value, relative_data, None, None, parent_id])
        cnx.commit()


def update_kpi_action_area(cnx, value, kpi_name, case_id, area, parent_id):
    cursor = cnx.cursor(buffered=True)
    kpi_action = get_action_kpi(cnx, parent_id)
    if kpi_action and value != 0:
        cursor.execute(constants.sql_update_action_kpi_area, [6, parent_id, value, area, parent_id])
        cnx.commit()


def update_kpi_action_relative_area(cnx, value, kpi_name, case_id, relative_area, parent_id):
    cursor = cnx.cursor(buffered=True)
    kpi_action = get_action_kpi(cnx, parent_id, zone_id=relative_area)
    if kpi_action and value != 0:
        relative_data = (kpi_action['absolute'] - value) / (value * 100)
        cursor.execute(constants.sql_update_action_kpi_relative_area,
                       [14, parent_id, relative_data, parent_id, relative_area])
        cnx.commit()


def load_json_kpi_data(cnx, json, level, kpis_bd_bio_data, case_id,
                       relative=False, area=False, relative_area=False, parent_id=None):
    if isinstance(json, dict):
        # Intermediate node: make sure the KPI exists, then recurse into its children.
        for json_key in json.keys():
            key_data = get_json_key_bd_data(json_key, level, kpis_bd_bio_data, parent_id)
            if not key_data:
                key_data = kpi_by_name_level(cnx, json_key, level, case_id, parent_id)
            load_json_kpi_data(cnx, json[json_key], level + 1, kpis_bd_bio_data, case_id,
                               relative=relative, area=area, relative_area=relative_area,
                               parent_id=key_data['id'])
    else:
        # Leaf value: store it against the parent KPI.
        parent_dict = get_kpi_data_by_id(parent_id, kpis_bd_bio_data)
        if parent_dict:
            if relative:
                update_kpi_action_relative(cnx, json, parent_dict['kpi_name'], case_id, relative,
                                           parent_id=parent_dict['id'])
            elif area:
                update_kpi_action_area(cnx, json, parent_dict['kpi_name'], case_id, area,
                                       parent_id=parent_dict['id'])
            elif relative_area:
                update_kpi_action_relative_area(cnx, json, parent_dict['kpi_name'], case_id, relative_area,
                                                parent_id=parent_dict['id'])
            else:
                update_kpi_action(cnx, json, parent_dict['kpi_name'], case_id, parent_dict['id'])


def database_store_kpi_relative_area(cnx, request):
    """
    Stores the KPI data from the selected JSON file in the database.
    :return:
    """
    cursor = cnx.cursor(buffered=True)
    dic = request.args.to_dict()
    data = list(dic.keys())
    with open("extra_data/%s" % data[0], "r") as file1:
        file_contents = file1.read()
    parsed_json = json.loads(file_contents)
    keys_1_level = parsed_json.keys()
    kpis_bd_bio_data = {}
    pos = 0
    relative_area = None  # avoid an unbound name if 'area' is missing or comes after 'bilbao'
    for key in keys_1_level:
        if key == 'area':
            relative_area = parsed_json['area']
        if key == 'bilbao':
            cursor.execute(constants.sql_get_kpi_from_case_ids, ['BIO'])
            ret = cursor.fetchall()
            for row in ret:
                kpis_bd_bio_data[pos] = {
                    'id': row[0],
                    'kpi_name': row[1],
                    'kpi_full_name': row[2],
                    'parent_id': row[3],
                    'kpi_level': row[4],
                    'popularity': row[5],
                    'use_case_id': row[6],
                }
                pos += 1
            load_json_kpi_data(cnx, parsed_json['bilbao'], 0, kpis_bd_bio_data, 'BIO',
                               relative_area=relative_area)
    return constants.ALL_RIGHT
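
# Illustrative sketch (an assumption, not taken from project documentation) of the JSON
# files read from extra_data/ by database_store_kpi and database_store_kpi_relative_area:
# a top-level "bilbao" tree whose nested keys are KPI names, whose leaves are numeric
# values, plus an optional top-level "area" entry. KPI names and numbers are invented.
#
# {
#     "area": "Abando",
#     "bilbao": {
#         "mobility": {
#             "cycling_km": 12.5,
#             "bus_delay_min": 3.2
#         },
#         "environment": {
#             "no2_ug_m3": 28.0
#         }
#     }
# }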