diff --git a/README.md b/README.md index b18ac48e9aa1a7c7e2a64081357972e829d59328..4daa1ef37c5f8884a6beecb7b6f77106a9d6a0d4 100644 --- a/README.md +++ b/README.md @@ -1,28 +1,27 @@ # T51 IaC Executor Manager -Running the server +Running the server with uvicorn + ```bash uvicorn main:app --reload ``` -###### Containers +Execute it directly -Containerize the IEM ```bash -docker build --build-arg API_KEY=$API_KEY -t optima-piacere-docker-dev.artifact.tecnalia.com/wp5/iem-api:y1 . +./main.py ``` -Similarly, docker compose can be used to build both -```bash -docker-compose build -``` +###### Containers + +Containerize the IEM -It can also be used to push them to the registry ```bash -docker-compose push +docker build --build-arg API_KEY=$API_KEY -t optima-piacere-docker-dev.artifact.tecnalia.com/wp5/iem-api:y1 . ``` -Run the IEM +Run the dockerized IEM + ```bash docker run -p 8000:8000 optima-piacere-docker-dev.artifact.tecnalia.com/wp5/iem-api:y1 ``` @@ -41,4 +40,15 @@ Run a single test nose2 -v tests.core.test_iem.TestIem.test_deploy_destroy_openstack ``` +Run unit and integration tests + +```bash +nose2 -v tests.unit +nose2 -v tests.it +``` + +Integration tests are skipped unless the corresponding environment variable is deliberately defined, e.g. +```bash +AWS=1 nose2 -v tests.it +``` diff --git a/docs/docs_files/01-intro.rst b/docs/docs_files/01-intro.rst index d3563bc65d8b3ad4aab9fc7721bd69101011203b..45533a1de0ba625b15ec6a32649dedc58beb291b 100644 --- a/docs/docs_files/01-intro.rst +++ b/docs/docs_files/01-intro.rst @@ -4,6 +4,7 @@ Introduction ************ -The IaC Execution Manager utilizes different technologies that can be used for the provisioning, configuration, and orchestration of the different infrastructural devices that can be found in a production deployment. This has served us to provide evidence and reasoning for the selection of the technologies that the IEM prototype is going to utilize. - -This prototype is viable for the deployment of different IaC technologies that cover the provisioning and the configuration of the infrastructural devices required for the projects utilizing the PIACERE framework. It provides a unified interface for other components so they can interact with the IEM in a unified manner. It can also be deployed in production utilizing container-based technologies which makes this prototype viable to be operationalized in public and private cloud provides, and on premises. For this prototype, the IEM supports two well established technologies (i.e. Ansible and Terraform) that are able to provision the different infrastructural devices required by the use cases, and the configuration of each of these infrastructural devices so they can accommodate the applications to be allocated. +.. + TODO Provide a brief description of the component here. Outline its goals, functionalities, etc.; + Mention subcomponents or extra delivered tools etc., with rst references to adequate sections.
+ \ No newline at end of file diff --git a/docs/kr-10.feature b/docs/kr-10.feature new file mode 100644 index 0000000000000000000000000000000000000000..2dbbdf97c7fd09d99e8204ed75223ad8d66c69da --- /dev/null +++ b/docs/kr-10.feature @@ -0,0 +1,49 @@ +Feature: PIACERE Run Time + +# The input of this scenario is detailed in the following +# https://git.code.tecnalia.com/piacere/private/t51-iem/iem/-/blob/y2/openapi.json#/deployments/deploy_deployments__post +# The following scenario relates to REQ81, REQ83, REQ84, REQ87 +Scenario: Deploy a fresh project which comprises terraform, ansible, and docker +Given a project bundle in the relevant IaC technologies (terraform, ansible, docker-compose), the deployment id, and the required cloud credentials + When the user triggers the deployment + Then the IEM is invoked + And executes the stages of the bundle asynchronously + And the user is notified that the deployment has been accepted + +# The input of this scenario is detailed in the following +# https://git.code.tecnalia.com/piacere/private/t51-iem/iem/-/blob/y2/openapi.json#/deployments/read_status_deployment_deployments__deployment_id__get +# The following scenario relates to REQ55, REQ82 +Scenario: Query the status of a running project +Given the deployment id of an already existing project + When the user queries the status of the project + Then the IEM is invoked + And the user is notified of the status + +# The input of this scenario is detailed in the following +# https://git.code.tecnalia.com/piacere/private/t51-iem/iem/-/blob/y2/openapi.json#/deployments/undeploy_undeploy__post +# The following scenario relates to REQ81, REQ83, REQ84, REQ85 +Scenario: Undeploy a project +Given the deployment id of an already existing project and the required cloud credentials + When the user triggers the undeployment + Then the IEM is invoked + And tears down the entire deployment asynchronously + And the user is notified that the undeployment has been accepted + +# The input of this scenario is detailed in the following +# https://git.code.tecnalia.com/piacere/private/t51-iem/iem/-/blob/y2/openapi.json#/deployments/read_status_deployment_deployments__deployment_id__get +# The following scenario relates to REQ55, REQ82 +Scenario: Query the status of an undeployed project +Given the deployment id of an undeployed project + When the user queries the status of the project + Then the IEM is invoked + And the user is notified of the status + +# The input of this scenario is detailed in the following +# https://git.code.tecnalia.com/piacere/private/t51-iem/iem/-/blob/y2/openapi.json#/deployments/deploy_deployments__post +# The following scenario relates to REQ12, REQ81, REQ83, REQ84, REQ87 +Scenario: Redeploy a project +Given a project bundle in the relevant IaC technologies (terraform, ansible, docker-compose), the deployment id, and the required cloud credentials + When the user triggers the deployment + Then the IEM is invoked + And executes the stages of the bundle asynchronously + And the user is notified that the deployment has been accepted diff --git a/docs/sequence-diagrams/.gitignore b/docs/sequence-diagrams/.gitignore new file mode 100755 index 0000000000000000000000000000000000000000..981aeb856a9b1b9235d497476e7847867227ce4a --- /dev/null +++ b/docs/sequence-diagrams/.gitignore @@ -0,0 +1 @@ +/out \ No newline at end of file diff --git a/docs/sequence-diagrams/51-request-deployment-status.puml b/docs/sequence-diagrams/51-request-deployment-status.puml new file mode 100644 index 
0000000000000000000000000000000000000000..752fd95b737797a19f268287e258b82a597a36d5 --- /dev/null +++ b/docs/sequence-diagrams/51-request-deployment-status.puml @@ -0,0 +1,26 @@ +@startuml + +title Request the Current Status of a Deployment + +participant "PRC" as DESIDE + +box "IaC Execution Manager" #LightBlue +participant "Rest API" as RTIEM_api #99FF99 +participant Core as RTIEM_core #99FF99 +participant Persistence as RTIEM_db #99FF99 +end box + + +DESIDE -> RTIEM_api: Deployment Status Request + +RTIEM_api -> RTIEM_core: Deployment Status Request + +RTIEM_core -> RTIEM_db: Deployment Status Request + +RTIEM_core <-- RTIEM_db: Deployment Status Response + +RTIEM_api <-- RTIEM_core: Deployment Status Response + +DESIDE <-- RTIEM_api: Deployment Status Response + +@enduml diff --git a/docs/sequence-diagrams/51-start-deployment.puml b/docs/sequence-diagrams/51-start-deployment.puml new file mode 100644 index 0000000000000000000000000000000000000000..ef8b0ba7bee289739c01127e0ed6a82fcea3887a --- /dev/null +++ b/docs/sequence-diagrams/51-start-deployment.puml @@ -0,0 +1,34 @@ +@startuml + +title Initiate Deployment + +participant "Runtime Controller (PRC)" as RTPRC + +box "IaC Execution Manager" #LightBlue +participant "Rest API" as RTIEM_api #99FF99 +participant Core as RTIEM_core #99FF99 +participant Persistence as RTIEM_db #99FF99 +participant "Executor" as executor #99FF99 + +end box + +collections "Resource Provider" as infraresource + +RTPRC -> RTIEM_api: Deployment Request +RTPRC <-- RTIEM_api: Deployment Response + +RTIEM_api -> RTIEM_core: Deployment Request + +RTIEM_core -> RTIEM_db: Save Deployment Started + +RTIEM_core -> executor: Deployment Request + +executor -> infraresource: Deploy Commands +executor -> infraresource: ... +executor -> infraresource: Deploy Commands + +executor -> RTIEM_core: Deployment Response + +RTIEM_core -> RTIEM_db: Save Deployment Status + +@enduml diff --git a/docs/sequence-diagrams/51-start-undeployment.puml b/docs/sequence-diagrams/51-start-undeployment.puml new file mode 100644 index 0000000000000000000000000000000000000000..35bc219934721a6b5075032e0f56bce71205cf6f --- /dev/null +++ b/docs/sequence-diagrams/51-start-undeployment.puml @@ -0,0 +1,34 @@ +@startuml + +title Initiate Undeployment + +participant "Runtime Controller (PRC)" as RTPRC + +box "IaC Execution Manager" #LightBlue +participant "Rest API" as RTIEM_api #99FF99 +participant Core as RTIEM_core #99FF99 +participant Persistence as RTIEM_db #99FF99 +participant "Executor" as executor #99FF99 +end box + +collections "Resource Provider" as infraresource + +RTPRC -> RTIEM_api: Undeployment Request +RTPRC <-- RTIEM_api: Undeployment Response + +RTIEM_api -> RTIEM_core: Undeployment Request + +RTIEM_core -> RTIEM_db: Save Undeployment Started + +RTIEM_core -> executor: Undeployment Request + +executor -> infraresource: Undeploy Commands +executor -> infraresource: ... +executor -> infraresource: Undeploy Commands + +executor -> RTIEM_core: Undeployment Response + +RTIEM_core -> RTIEM_db: Save Undeployment Status + +@enduml + diff --git a/docs/sequence-diagrams/README.md b/docs/sequence-diagrams/README.md new file mode 100755 index 0000000000000000000000000000000000000000..cc0a33c7cc4a2bdfdd479cb48ca979a04135daa2 --- /dev/null +++ b/docs/sequence-diagrams/README.md @@ -0,0 +1,27 @@ +# T51 IaC Executor Manager Sequence diagrams + +This folder contains the sequence diagrams developed for the T51 IEM. 
They have been developed using PlantUML +* https://plantuml.com + +These files follow a very simple text-based syntax, e.g. +``` +Bob->Alice : Hello! +``` +which renders (provided PlantUML is enabled in GitLab, see https://docs.gitlab.com/ee/administration/integration/plantuml.html) as + +```plantuml +Bob->Alice : Hello! +``` +We can also specify a source file + +```plantuml source="51-start-deployment.puml" +``` + +To edit them and check the rendering, there are several options: +* Edit and generate the file using the jar, which is not very user-friendly +``` java -jar plantuml.jar sequenceDiagram.txt ``` +* Use an IDE and a plugin. There are plugins available for different IDEs, e.g. + * Eclipse https://plantuml.com/eclipse + * Visual Studio Code https://marketplace.visualstudio.com/items?itemName=jebbs.plantuml + + diff --git a/iem-api/.python-version b/iem-api/.python-version index f69abe410a3c3fe2bf44a95f192f3c915ef19e1a..0a590336d5996461bb614a2b45ad675376e2d4d6 100644 --- a/iem-api/.python-version +++ b/iem-api/.python-version @@ -1 +1 @@ -3.9.7 +3.9.10 diff --git a/iem-api/Dockerfile b/iem-api/Dockerfile index 635043b24d1dd1182614380f044d470f0af21803..301030200b325e990f8f11e5282de30d5c70242b 100644 --- a/iem-api/Dockerfile +++ b/iem-api/Dockerfile @@ -1,17 +1,16 @@ FROM hashicorp/terraform:1.1.4 -ARG API_KEY +COPY requirements.txt /tmp/requirements.txt +RUN apk add py3-pip cargo g++ python3-dev file libffi-dev openssl-dev bash python3 gnupg +RUN pip3 install -r /tmp/requirements.txt +# install docker stack +RUN apk add docker docker-compose -ENV API_KEY=$API_KEY +ENV API_KEY=changeme ENV IEM_HOME=/opt/iem/ +ENV DOCKERIZED=true COPY src/resources/ansible.cfg /etc/ansible/ansible.cfg -COPY requirements.txt /tmp/requirements.txt -COPY src ${IEM_HOME}src -COPY main.py ${IEM_HOME}main.py - -RUN apk add py3-pip cargo g++ python3-dev file libffi-dev openssl-dev bash python3=3.9.13-r1 gnupg -RUN pip3 install -r /tmp/requirements.txt # RUN adduser -h ${IEM_HOME} -S -D iem COPY certs/config ${IEM_HOME}.ssh/config @@ -24,11 +23,17 @@ RUN adduser -h ${IEM_HOME} -S -D iem && \ chmod 0600 ${IEM_HOME}.ssh/id_rsa && \ chmod 0644 ${IEM_HOME}.ssh/id_rsa.pub USER iem -RUN ansible-galaxy collection install community.general COPY roles.yml /tmp/roles.yml RUN ansible-galaxy install -r /tmp/roles.yml +RUN mkdir -p ${IEM_HOME}db && \ + mkdir -p ${IEM_HOME}deployments + +COPY src ${IEM_HOME}src +COPY main.py ${IEM_HOME}main.py +COPY logging.ini ${IEM_HOME}logging.ini + ENTRYPOINT ["/usr/bin/env"] WORKDIR ${IEM_HOME} -CMD /usr/bin/uvicorn main:app --host 0.0.0.0 +CMD /usr/bin/uvicorn main:app --host 0.0.0.0 --log-level info EXPOSE 8000 diff --git a/iem-api/certs/config b/iem-api/certs/config new file mode 100644 index 0000000000000000000000000000000000000000..a3dd79d71dd647b0bf2b82cc61266647e7d979bc --- /dev/null +++ b/iem-api/certs/config @@ -0,0 +1,3 @@ +Host * + StrictHostKeyChecking no + UserKnownHostsFile=/dev/null \ No newline at end of file diff --git a/iem-api/certs/id_rsa b/iem-api/certs/id_rsa new file mode 100644 index 0000000000000000000000000000000000000000..86a197bf1b6772c1eda0f7ac9d77918997b54a4a --- /dev/null +++ b/iem-api/certs/id_rsa @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEA1FrTNE42EgZr9WJNMtvpKFHYhPUJ4lzEp83EM0jYY3TyjmIe +ThMuqMLAHCk22fl4a8PttucggJ5ZWKhcJh623/y8AybJcmqZgq9a41Q609dmirf0 +7frCl+6zL8Mqy2Le2BD4eRADcq11s8r8Ys6J+EBPHQgEnK9CeZLSc/WFRlVr4bOD +s0bEouDxjTAMYjYcpsCwqYgGdIXI9WWsnt3RvcEe8CaiTqoyDN8ZtgkG6MweSrTQ 
+js8ySHO6o25cOoF7aT9Ihhf32I+KUanNIOvk3RAw2z1FK5xkFbbqMggZqz7rJn3M +sn2dDiCQi2CWox2OYXV/jJKLC3UFuOX64fS9cwIDAQABAoIBAQCs69Tm1/Vx0ibh +aA4DJ06C1bsh8cP9v5soJgfp1xzWSGooBcA1xasOI6B6jhkrgNlNr/uIIEe4VLne +1yJKrGIwnUagrloGQMYGxDKXwYQx80p+FXRuwe7p96eUcjIL8tQSUCd1tdOI87VQ +FjBVaWiybfO+aUQQLytLgoK7iKfhb7vO+9F+ZK1iDjBDNxFuiOM5zoeWOI7boYkD +2yXIkwoBePS2rosbPLa649sVakKex2WhQdUFst4Zba2RhnWQBXUY44LvEK5TzScF +FyYphPOUSplbzzM2+fuOna91NIWmJyHmf15lj7X9kC66XFIZMlvapksB8stEpDiA +4al3IdBJAoGBAPPuM3xkr/kQYYn7E42fgpmINZ78V48gMLhpyUOabZtF8inMyMPB +q7kfHns8Vx0ET8grSNr3qwDDV82lwvGqRCFChASMdQRR9LanydkDSeqpuZyQtVlt +A/65YUdcNY7Vy+M+fRh5Srh/6qcO3beLeLWXbJ4RHBP/OEmHuF4mLfgVAoGBAN7c +qdxTOhXPvOU69Bs5rJdfo6qBI1Yg8MCGctsUwPFpw1kW773ROOPa6XI6D74Dsdg8 +ypZ+IC3pRVtx61Xi3NOwxWNTTG+dyUgTSFz+WKjywXZXeHIbWngiFqk8JFYQWPzk +6YaJk4tZhk2YuNNaCCYRgQqyWv8coEurRlMXZHlnAoGBALcJwdaQ0z8oXJimL4jw +7ZX5kIrpPWanuAdZUe4Jfj+qX8mf4fKKbCowQLYmlBOw/ZDtcfDlMYsUCdnFjZ+7 +rP3sJJYpM1F3khJRm3PdNOUCUMY8C+i7lejZADcE6SdyJFkztbjcowYI7nJHBHZL +ENvqcVW27wPOWlVKozz6lzn1AoGALVwmaoS6DtRwcwuzwZLUkR7TNhIAujgMKHN1 +DyhDOR+4tfpYI39hH+dfmnM83wTrfsKozUawkAepqToflySMo72X/2Zl6VXpMPVT +xjGyo/h87fRRvI/asxblG9702luLcTW6XjrEQBmhn0uVWtc5T15CsIWqxb/y1FPx +BVp+hcMCgYAlJXbjzjbbDoIOCsXPSPe9voBL8zVwp0aNuvQcuB/vCt1n1c1DWuPr +AGMy/fRwY0Znag+ODMuulm7RgXUQy6ifJHiz9cKVGg/mGifaJSjgC+1AI9HFlij3 +asM5CueU0gK974rDxQkwmIWpRH57+kf6s8tGDrPPvqX9S4p3oxFlTw== +-----END RSA PRIVATE KEY----- diff --git a/iem-api/certs/id_rsa.pub b/iem-api/certs/id_rsa.pub new file mode 100644 index 0000000000000000000000000000000000000000..245caaf060f0a8a06f315a8ba009f65b1e429d18 --- /dev/null +++ b/iem-api/certs/id_rsa.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDUWtM0TjYSBmv1Yk0y2+koUdiE9QniXMSnzcQzSNhjdPKOYh5OEy6owsAcKTbZ+Xhrw+225yCAnllYqFwmHrbf/LwDJslyapmCr1rjVDrT12aKt/Tt+sKX7rMvwyrLYt7YEPh5EANyrXWzyvxizon4QE8dCAScr0J5ktJz9YVGVWvhs4OzRsSi4PGNMAxiNhymwLCpiAZ0hcj1Zaye3dG9wR7wJqJOqjIM3xm2CQbozB5KtNCOzzJIc7qjblw6gXtpP0iGF/fYj4pRqc0g6+TdEDDbPUUrnGQVtuoyCBmrPusmfcyyfZ0OIJCLYJajHY5hdX+MkosLdQW45frh9L1z josu@WKM0092A diff --git a/iem-api/db/.gitignore b/iem-api/db/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..8d96d78ffb43f7e6e74d5dab1726cd346f6d23ba --- /dev/null +++ b/iem-api/db/.gitignore @@ -0,0 +1 @@ +iem.db diff --git a/iem-api/logging.ini b/iem-api/logging.ini new file mode 100644 index 0000000000000000000000000000000000000000..7a1ca118cbc13a3b9c05bb00e579c9a043a49a3c --- /dev/null +++ b/iem-api/logging.ini @@ -0,0 +1,31 @@ +[loggers] +keys=root,src,uvicorn + +[handlers] +keys=stream_handler + +[formatters] +keys=formatter + +[logger_root] +level=INFO +handlers=stream_handler + +[logger_src] +level=INFO +handlers=stream_handler +qualname=src +propagate=0 + +[logger_uvicorn] +level=INFO +handlers=stream_handler +qualname=uvicorn +propagate=0 + +[handler_stream_handler] +class=StreamHandler +formatter=formatter + +[formatter_formatter] +format=%(asctime)s %(name)-12s %(levelname)-8s %(message)s diff --git a/iem-api/main.py b/iem-api/main.py index 3fb84f4b479de3ea5635bd2ca4b3138d7bed5a1a..ec8ad099ee75240f85befb0e7589d17a585e58af 100755 --- a/iem-api/main.py +++ b/iem-api/main.py @@ -1,33 +1,34 @@ +#!/usr/bin/env python3 + import json import logging -import os - -from fastapi import FastAPI, BackgroundTasks, status, Security, Depends, HTTPException -from fastapi.openapi.utils import get_openapi -from fastapi.security.api_key import APIKeyHeader, APIKey from typing import List +import uvicorn +from fastapi import (BackgroundTasks, Depends, FastAPI, HTTPException, + 
Security, status) +from fastapi.openapi.utils import get_openapi +from fastapi.security.api_key import APIKey, APIKeyHeader +from src import buildno, major, minor, revision from src.core.iem import Iem -from src.core.persistence import Sqlite -from src.core.utils import ( - BaseResponse, - DeploymentResponse, - DeploymentRequest, - DeleteDeploymentRequest, -) - -LOGGER = logging.getLogger("iem") +from src.core.persistence import Persistence +from src.core.utils import (BaseResponse, DeleteDeploymentRequest, + DeploymentRequest, DeploymentResponse, + SelfHealingRequest, DeploymentStatusRequest) api_key_header = APIKeyHeader(name="x-api-key", auto_error=False) app = FastAPI( - title="IaC Execution Manager", version="0.1.15", description="IaC Execution Manager" + title="IaC Execution Manager", + version=f"{major}.{minor}.{revision}.{buildno}", + description="IaC Execution Manager", ) +logging.config.fileConfig("logging.ini") -async def get_api_key(api_key_query: str = Security(api_key_header)): - if Sqlite().valid_api_key(api_key_query=api_key_query): +async def get_api_key(api_key_query: str = Security(api_key_header)): + if Persistence().valid_api_key(api_key_query=api_key_query): return api_key_query else: raise HTTPException( @@ -37,22 +38,18 @@ async def get_api_key(api_key_query: str = Security(api_key_header)): @app.get("/", tags=["greeting"]) -async def read_root(api_key: APIKey = Depends(get_api_key)): +async def read_root(_: APIKey = Depends(get_api_key)): return { "message": "Hello from the IaC Execution Manager!", "version": app.version, "terraform": "1.1.4", - "ansible": "5.5.0", + "ansible": "8.5.0", } @app.get("/deployments/", response_model=List[DeploymentResponse], tags=["deployments"]) async def read_status( - start: int = 0, - count: int = 25, - start_date: str = "1970-01-01", - end_date: str = "2100-01-01", - api_key: APIKey = Depends(get_api_key), + _: APIKey = Depends(get_api_key), ): all_deployments = Iem(credentials=None).get_all_deployments() return list(all_deployments) @@ -65,14 +62,29 @@ async def read_status( ) async def read_status_deployment( deployment_id: str, - start: int = 0, - count: int = 1, - api_key: APIKey = Depends(get_api_key), + _: APIKey = Depends(get_api_key), ): deployment = Iem().get_deployment(deployment_id=deployment_id) return list(deployment) +@app.get( + "/deployments/{deployment_id}/stages/{stage_id}/outputs", + response_model=dict, + tags=["deployments"], +) +async def read_deployment_outputs( + deployment_id: str, + stage_id: str, + d: DeploymentStatusRequest, + _: APIKey = Depends(get_api_key), +): + outputs = Iem(credentials=d.credentials).get_deployment_outputs( + deployment_id=deployment_id, stage_id=stage_id + ) + return outputs + + @app.post( "/deployments/", status_code=status.HTTP_201_CREATED, @@ -82,11 +94,10 @@ async def read_status_deployment( async def deploy( d: DeploymentRequest, background_tasks: BackgroundTasks, - api_key: APIKey = Depends(get_api_key), + _: APIKey = Depends(get_api_key), ): - logging.warning(d) i = Iem(credentials=d.credentials) - background_tasks.add_task(i.deploy, d.deployment_id, d.repository, d.commit) + background_tasks.add_task(i.deploy, d.deployment_id, d.bundle.base64) return BaseResponse(message="Deployment Request Created") @@ -99,15 +110,48 @@ async def deploy( async def undeploy( d: DeleteDeploymentRequest, background_tasks: BackgroundTasks, - api_key: APIKey = Depends(get_api_key), + _: APIKey = Depends(get_api_key), ): - logging.warning(d) i = Iem(credentials=d.credentials) 
background_tasks.add_task(i.destroy, d.deployment_id) return BaseResponse(message="Undeployment Request Created") -if os.getenv("STAGE") == "dev": +@app.post( + "/deployments/{deployment_id}/self-healing", + status_code=status.HTTP_201_CREATED, + response_model=BaseResponse, + tags=["deployments"], +) +async def self_healing_strategy( + deployment_id: str, + d: SelfHealingRequest, + background_tasks: BackgroundTasks, + _: APIKey = Depends(get_api_key), +): + i = Iem(credentials=d.credentials) + background_tasks.add_task(i.self_healing_strategy, deployment_id, d.playbook) + return BaseResponse(message=f"Self-Healing Strategy Request Triggered") + + +@app.post( + "/update-iac-bundle/", + status_code=status.HTTP_201_CREATED, + response_model=BaseResponse, + tags=["deployments"], +) +async def self_healing_bundle( + d: DeploymentRequest, + background_tasks: BackgroundTasks, + _: APIKey = Depends(get_api_key), +): + i = Iem(credentials=d.credentials) + background_tasks.add_task(i.self_healing_bundle, d.deployment_id, d.bundle.base64) + return BaseResponse(message="Bundle Replacement Created") + + +if __name__ == "__main__": + uvicorn.run("main:app", host="127.0.0.1", port=8000, log_level="info") with open("../openapi.json", "w") as f: json.dump( get_openapi( diff --git a/iem-api/requirements.txt b/iem-api/requirements.txt index 00500ee3778f876c0c44d1ec58281adc288329dc..19fad26e08d297e72b45895b938ff7709cf28ab4 100644 --- a/iem-api/requirements.txt +++ b/iem-api/requirements.txt @@ -1,7 +1,6 @@ fastapi==0.73.0 uvicorn==0.17.0.post1 -ansible==5.5.0 -ansible-core==2.12.3 +ansible==8.5.0 GitPython==3.1.26 requests==2.26.0 ratelimiter==1.2.0.post0 diff --git a/iem-api/src/__init__.py b/iem-api/src/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..aa9f25d804970378353cbec7e79ad12e9407be3a --- /dev/null +++ b/iem-api/src/__init__.py @@ -0,0 +1 @@ +from ._version import buildno, major, minor, revision diff --git a/iem-api/src/_version.py b/iem-api/src/_version.py new file mode 100644 index 0000000000000000000000000000000000000000..9d0e591c16644e098ebc4753345143b8c789551c --- /dev/null +++ b/iem-api/src/_version.py @@ -0,0 +1,4 @@ +major = 3 +minor = 0 +revision = 1 +buildno = 18 diff --git a/iem-api/src/core/engine.py b/iem-api/src/core/engine.py index 08f25cb557bd109225b33d981be86000d83c3214..43cb59eb10aad3e8ef34155aba6392adb4ca1d45 100644 --- a/iem-api/src/core/engine.py +++ b/iem-api/src/core/engine.py @@ -1,12 +1,12 @@ import logging import os import subprocess - +import time from abc import ABC, abstractmethod -from jinja2 import Template -from subprocess import CalledProcessError -LOGGER = logging.getLogger("iem") +from jinja2 import Environment, FileSystemLoader + +LOGGER = logging.getLogger(__name__) class Factory: @@ -26,6 +26,18 @@ class Engine(ABC): self._repo_path = repo_path self._env = env + def _run_command(self, args: list) -> subprocess.CompletedProcess: + output = subprocess.run( + args=args, cwd=self._repo_path, env=self._env, capture_output=True + ) + if output.returncode == 0: + LOGGER.info(output.stdout.decode("utf-8")) + LOGGER.info(output.stderr.decode("utf-8")) + else: + LOGGER.error(output.stdout.decode("utf-8")) + LOGGER.error(output.stderr.decode("utf-8")) + return output + @abstractmethod def apply(self): pass @@ -43,107 +55,80 @@ class Engine(ABC): class Terraform(Engine): - def __init__(self, repo_path, my_env): + def __init__(self, repo_path, my_env, skip_inventory=False): super().__init__(name="Terraform", repo_path=repo_path, env=my_env) 
def apply(self): LOGGER.info("About to apply terraform") - try: - output = subprocess.run( - ["terraform", "init"], - check=True, - cwd=self._repo_path, - env=self._env, - capture_output=True, - ) - output = subprocess.run( - ["terraform", "apply", "-auto-approve"], - check=True, - cwd=self._repo_path, - env=self._env, - # capture_output=True, - ) - return "CREATED", output.stdout, output.stderr - except CalledProcessError as e: - LOGGER.exception(e) - return "ERROR", None, None + + args = ["terraform", "init"] + output = self._run_command(args=args) + if output.returncode != 0: + return output.returncode, output.stdout, output.stderr + + args = ["terraform", "apply", "-auto-approve"] + output = self._run_command(args=args) + return output.returncode, output.stdout, output.stderr def destroy(self): - try: - output = subprocess.run( - ["terraform", "destroy", "-auto-approve"], - check=True, - cwd=self._repo_path, - env=self._env, - # capture_output=True, - ) - return "DESTROYED", output.stdout, output.stderr - except CalledProcessError as e: - LOGGER.exception(e) - return "ERROR", None, None + args = ["terraform", "destroy", "-auto-approve"] + output = self._run_command(args=args) + return output.returncode, output.stdout, output.stderr def output(self): - try: - output = subprocess.run( - ["terraform", "output", "-json"], - check=True, - cwd=self._repo_path, - env=self._env, - capture_output=True, - ) - return output.stdout - except CalledProcessError as e: - LOGGER.exception(e) - return None + args = ["terraform", "output", "-json"] + output = self._run_command(args=args) + output.check_returncode() + return output.stdout class Ansible(Engine): - def __init__(self, repo_path, my_env): + def __init__(self, repo_path, my_env, skip_inventory=False): super().__init__(name="Ansible", repo_path=repo_path, env=my_env) - self.__parse_inventory() + self.__parse_inventory() if not skip_inventory else None def __parse_inventory(self): - with open(f"{self._repo_path}/inventory.j2", "r") as f: - inventory = Template(f.read()) + environment = Environment(loader=FileSystemLoader({self._repo_path})) with open(f"{self._repo_path}/inventory", "w") as f: - f.write(inventory.render(self._env)) - - with open(f"{self._repo_path}/ssh_key.j2", "r") as f: - ssh_key = Template(f.read()) + template = environment.get_template("inventory.j2") + f.write(template.render(self._env)) with open(f"{self._repo_path}/ssh_key", "w") as f: - f.write(ssh_key.render(self._env)) + template = environment.get_template("ssh_key.j2") + f.write(template.render(self._env)) os.chmod(f"{self._repo_path}/ssh_key", 0o0600) def apply(self): LOGGER.info("About to apply ansible") - try: - output = subprocess.run( - ["ansible", "all", "-i", "inventory", "-m", "wait_for_connection"], - check=True, - cwd=self._repo_path, - env=self._env, - capture_output=True, - ) + for _ in range(2): + args = [ + "ansible", + "all", + "-i", + "inventory", + "-m", + "wait_for_connection", + ] + output = self._run_command(args=args) + if output.returncode != 0: + time.sleep(10) + continue LOGGER.info("All hosts in the inventory are reachable.") - output = subprocess.run( - ["ansible-playbook", "-i", "inventory", "main.yml"], - check=True, - cwd=self._repo_path, - env=self._env, - capture_output=True, - ) - return "CREATED", output.stdout, output.stderr - except CalledProcessError as e: - LOGGER.exception(e.output) - raise e + + args = ["ansible-playbook", "-i", "inventory", "main.yml"] + output = self._run_command(args=args) + if output.returncode != 0: + 
time.sleep(10) + continue + + return output.returncode, output.stdout, output.stderr def destroy(self): LOGGER.info("Nothing to be seen here.") - return "DESTROYED", None, None + return 0, None, None def output(self): LOGGER.info("Nothing to be seen here.") diff --git a/iem-api/src/core/iem.py b/iem-api/src/core/iem.py index e4ad215022efdf497b2358ff720cde87a7f155f2..8ba06ec213839061e47a85a137da0400a7d787d4 100644 --- a/iem-api/src/core/iem.py +++ b/iem-api/src/core/iem.py @@ -1,54 +1,38 @@ -import git +import base64 +import binascii import json import logging import os -import subprocess - -from git import GitCommandError, InvalidGitRepositoryError -from omegaconf import OmegaConf +import shutil +from io import BytesIO from subprocess import CalledProcessError +from zipfile import BadZipFile, ZipFile +from omegaconf import OmegaConf from src.core.engine import Factory -from src.core.persistence import Sqlite -from src.core.utils import DeploymentResponse, Credentials +from src.core.persistence import Persistence +from src.core.utils import Credentials, DeploymentResponse -LOGGER = logging.getLogger("iem") +LOGGER = logging.getLogger(__name__) class Iem: def __init__(self, credentials: Credentials = None): - logging.basicConfig( - level=logging.INFO, format="%(asctime)s %(levelname)-8s %(message)s" - ) - self._credentials = credentials - # if credentials.aws: - # self._aws_access_key_id = credentials.aws.access_key_id - # self._aws_secret_access_key = credentials.aws.secret_access_key - - # if credentials.openstack: - # self._os_username = credentials.openstack.user_name - # self._os_password = credentials.openstack.password - # self._os_auth_url = credentials.openstack.auth_url - # self._os_project_name = credentials.openstack.project_name - # Check IEM_HOME variable - if os.getenv("IEM_HOME") is None: - LOGGER.error("Please define IEM_HOME environment variable.") - exit(-1) - self._iem_home = os.getenv("IEM_HOME") + self._path_deployments = f"{os.environ['IEM_HOME']}deployments/" - self._persistence = Sqlite() + self._persistence = Persistence() def get_all_deployments(self): rows = self._persistence.get_all_deployments() for r in rows: d = DeploymentResponse( - status_time=r[0], - deployment_id=r[1], - status=r[2], - stdout=r[3], - stderr=r[4], + status_time=r.status_time, + deployment_id=r.deployment_id, + status=r.status, + stdout=r.stdout, + stderr=r.stderr, ) yield d @@ -56,67 +40,139 @@ class Iem: row = self._persistence.get_deployment(deployment_id=deployment_id) if row: d = DeploymentResponse( - status_time=row[0], - deployment_id=row[1], - status=row[2], - stdout=row[3], - stderr=row[4], + status_time=row.status_time, + deployment_id=row.deployment_id, + status=row.status, + stdout=row.stdout, + stderr=row.stderr, ) yield d - def deploy(self, deployment_id: str, repository: str, commit: str): + def get_deployment_outputs(self, deployment_id: str, stage_id: str): + function_name = "get_deployment_outputs" + LOGGER.info(f"Running {function_name} method.") + + repo_path = f"{self._path_deployments}{deployment_id}" + + LOGGER.info(f"Reading credentials.") + my_env = self._get_env( + deployment_id=deployment_id, credentials=self._credentials + ) + + LOGGER.info(f"About to read outputs for project {repo_path}.") + + conf = OmegaConf.load(f"{repo_path}/{stage_id}/config.yaml") + self.validate(env=my_env, io=conf.input) + + my_eng = Factory().get_engine(conf.engine)( + repo_path=f"{repo_path}/{stage_id}", + my_env=my_env, + ) + + output = my_eng.output() + + return 
dict(json.loads(output)) + + def _get_env(self, deployment_id: str, credentials: Credentials) -> dict: + my_env = os.environ.copy() + if credentials.aws: + my_env["AWS_ACCESS_KEY_ID"] = credentials.aws.access_key_id + my_env["AWS_SECRET_ACCESS_KEY"] = credentials.aws.secret_access_key + my_env["AWS_REGION"] = credentials.aws.region + if credentials.openstack: + my_env["OS_USERNAME"] = credentials.openstack.user_name + my_env["OS_PASSWORD"] = credentials.openstack.password + my_env["OS_AUTH_URL"] = credentials.openstack.auth_url + my_env["OS_PROJECT_NAME"] = credentials.openstack.project_name + if credentials.azure: + my_env["ARM_CLIENT_ID"] = credentials.azure.arm_client_id + my_env["ARM_CLIENT_SECRET"] = credentials.azure.arm_client_secret + my_env["ARM_SUBSCRIPTION_ID"] = credentials.azure.arm_subscription_id + my_env["ARM_TENANT_ID"] = credentials.azure.arm_tenant_id + if credentials.vmware: + my_env["VSPHERE_USER"] = credentials.vmware.user_name + my_env["VSPHERE_PASSWORD"] = credentials.vmware.password + my_env["VSPHERE_SERVER"] = credentials.vmware.server + my_env[ + "VSPHERE_ALLOW_UNVERIFIED_SSL" + ] = credentials.vmware.allow_unverified_ssl + if credentials.custom: + for key, value in credentials.custom.items(): + my_env[key] = value + + my_env["DEPLOYMENT_ID"] = deployment_id + + return my_env + + def _apply_stage( + self, my_env: dict, repo_path: str, stage: str, skip_inventory=False + ): + conf = OmegaConf.load(f"{repo_path}/{stage}/config.yaml") + + self.validate(env=my_env, io=conf.input) if not skip_inventory else None + + LOGGER.info(f"About to run stage: {stage}") + my_eng = Factory().get_engine(conf.engine)( + repo_path=f"{repo_path}/{stage}", + my_env=my_env, + skip_inventory=skip_inventory, + ) + returncode, stdout, stderr = my_eng.apply() + + if returncode == 0 and conf.output: + output = my_eng.output() + self.update_env(output=output, env=my_env, io=conf.output) + self.validate(env=my_env, io=conf.output) + + return returncode, stdout, stderr + + def deploy(self, deployment_id: str, bundle: str) -> int: function_name = "deploy" - LOGGER.info(f"Running {function_name} method") + LOGGER.info(f"Running {function_name} method.") self._persistence.insert_deployment( deployment_id=deployment_id, status="STARTED", stdout=None, stderr=None ) + repo_path = f"{self._path_deployments}{deployment_id}" + if not os.path.exists(repo_path): + os.makedirs(repo_path) + try: - repo_path = f"{self._iem_home}{deployment_id}" - self.get_repo(repo_path=repo_path, repository=repository, commit=commit) - - LOGGER.info(f"About to deploy project {repo_path}") - my_env = os.environ.copy() - if self._credentials.aws: - my_env["AWS_ACCESS_KEY_ID"] = self._credentials.aws.access_key_id - my_env[ - "AWS_SECRET_ACCESS_KEY" - ] = self._credentials.aws.secret_access_key - if self._credentials.openstack: - my_env["OS_USERNAME"] = self._credentials.openstack.user_name - my_env["OS_PASSWORD"] = self._credentials.openstack.password - my_env["OS_AUTH_URL"] = self._credentials.openstack.auth_url - my_env["OS_PROJECT_NAME"] = self._credentials.openstack.project_name + LOGGER.info(f"Decompressing base64 bundle.") + bundle_decode = base64.b64decode(bundle) + ZipFile(BytesIO(bundle_decode)).extractall(repo_path) + + LOGGER.info(f"Reading credentials.") + my_env = self._get_env( + deployment_id=deployment_id, credentials=self._credentials + ) + LOGGER.info(f"About to deploy project {repo_path}.") project_conf = OmegaConf.load(f"{repo_path}/config.yaml") for stage in project_conf.iac: - conf = 
OmegaConf.load(f"{repo_path}/{stage}/config.yaml") - - self.validate(env=my_env, io=conf.input) - - my_eng = Factory().get_engine(conf.engine)( - repo_path=f"{repo_path}/{stage}", my_env=my_env + returncode, stdout, stderr = self._apply_stage( + my_env=my_env, repo_path=repo_path, stage=stage ) - status, stdout, stderr = my_eng.apply() - - if conf.output: - output = my_eng.output() - self.update_env(output=output, env=my_env, io=conf.output) - self.validate(env=my_env, io=conf.output) + if returncode != 0: + LOGGER.error(f"Deployment failed for project {repo_path}.") + break self._persistence.insert_deployment( deployment_id=deployment_id, - status=status, + status="CREATED" if returncode == 0 else "FAILED", stdout=stdout, stderr=stderr, ) + LOGGER.info(f"Deployment completed for project {repo_path}") + return returncode except ( + binascii.Error, + BadZipFile, CalledProcessError, - GitCommandError, FileNotFoundError, - InvalidGitRepositoryError, NameError, + KeyError, ) as e: LOGGER.exception(e) self._persistence.insert_deployment( @@ -124,23 +180,15 @@ class Iem: ) raise e - LOGGER.info(f"The {function_name} method finished successfully") - def destroy(self, deployment_id: str): function_name = "destroy" LOGGER.info(f"Running {function_name} method") - repo_path = f"{self._iem_home}{deployment_id}" + repo_path = f"{self._path_deployments}{deployment_id}" - my_env = os.environ.copy() - if self._credentials.aws: - my_env["AWS_ACCESS_KEY_ID"] = self._credentials.aws.access_key_id - my_env["AWS_SECRET_ACCESS_KEY"] = self._credentials.aws.secret_access_key - if self._credentials.openstack: - my_env["OS_USERNAME"] = self._credentials.openstack.user_name - my_env["OS_PASSWORD"] = self._credentials.openstack.password - my_env["OS_AUTH_URL"] = self._credentials.openstack.auth_url - my_env["OS_PROJECT_NAME"] = self._credentials.openstack.project_name + my_env = self._get_env( + deployment_id=deployment_id, credentials=self._credentials + ) LOGGER.info(f"About to destroy project {repo_path}") project_conf = OmegaConf.load(f"{repo_path}/config.yaml") @@ -151,11 +199,19 @@ class Iem: my_eng = Factory().get_engine(conf.engine)( repo_path=f"{repo_path}/{stage}", my_env=my_env ) - status, stdout, stderr = my_eng.destroy() + returncode, stdout, stderr = my_eng.destroy() + if returncode != 0: + LOGGER.error(f"Undeployment failed for project {repo_path}.") + break + + if "/home" in repo_path: + LOGGER.exception(f"Cannot delete {repo_path}") + elif os.getenv("DOCKERIZED") == "true": + shutil.rmtree(repo_path) self._persistence.insert_deployment( deployment_id=deployment_id, - status=status, + status="DESTROYED" if returncode == 0 else "FAILED", stdout=stdout, stderr=stderr, ) @@ -174,20 +230,114 @@ class Iem: LOGGER.error(f"Variable {variable} not found in environment.") raise NameError - def get_repo(self, repo_path: str, repository: str, commit: str): - LOGGER.info( - f"About to download the project {repo_path} with repository {repository}" + def self_healing_strategy(self, deployment_id: str, playbook: str): + function_name = "self_healing_strategy" + LOGGER.info(f"Running {function_name} method") + + self._persistence.insert_deployment( + deployment_id=deployment_id, status="SHS-STARTED", stdout=None, stderr=None ) - if not os.path.isdir(repo_path): - os.makedirs(repo_path) - repo = git.Repo.clone_from( - url=repository, to_path=repo_path, no_checkout=True + + LOGGER.info(f"The {function_name} method finished successfully") + repo_path = f"{self._path_deployments}{deployment_id}" + if not 
os.path.exists(repo_path): + LOGGER.error( + f"The deployment_id = {deployment_id} does not correspond to any active deployment." ) - else: - repo = git.Repo(repo_path) + return - repo.git.checkout(commit) + try: + LOGGER.info(f"Updating main.yml with new playbook.") + with open(f"{repo_path}/self_healing_monitoring/main.yml", "w") as f: + f.write(playbook) - for submodule in repo.submodules: - submodule.update(init=True) - submodule.module().git.checkout() + LOGGER.info(f"Reading credentials.") + my_env = self._get_env( + deployment_id=deployment_id, credentials=self._credentials + ) + + returncode, stdout, stderr = self._apply_stage( + my_env=my_env, + repo_path=repo_path, + stage=f"self_healing_monitoring", + skip_inventory=True, + ) + if returncode != 0: + LOGGER.error(f"Update failed for project {repo_path}.") + + self._persistence.insert_deployment( + deployment_id=deployment_id, + status="UPDATED" if returncode == 0 else "FAILED", + stdout=stdout, + stderr=stderr, + ) + LOGGER.info(f"Update completed for project {repo_path}") + except ( + binascii.Error, + CalledProcessError, + FileNotFoundError, + NameError, + KeyError, + ) as e: + LOGGER.exception(e) + self._persistence.insert_deployment( + deployment_id=deployment_id, status="ERROR", stdout=None, stderr=None + ) + raise e + + def self_healing_bundle(self, deployment_id: str, bundle: str) -> int: + function_name = "self_healing_bundle" + LOGGER.info(f"Running {function_name} method") + + self._persistence.insert_deployment( + deployment_id=deployment_id, status="UPDATING", stdout=None, stderr=None + ) + + repo_path = f"{self._path_deployments}{deployment_id}" + if not os.path.exists(repo_path): + LOGGER.error( + f"The deployment_id = {deployment_id} does not correspond to any active deployment." 
+ ) + return -1 + + try: + LOGGER.info(f"Decompressing base64 bundle.") + bundle_decode = base64.b64decode(bundle) + ZipFile(BytesIO(bundle_decode)).extractall(repo_path) + + LOGGER.info(f"Reading credentials.") + my_env = self._get_env( + deployment_id=deployment_id, credentials=self._credentials + ) + + LOGGER.info(f"About to update project {repo_path}.") + project_conf = OmegaConf.load(f"{repo_path}/config.yaml") + for stage in project_conf.iac: + returncode, stdout, stderr = self._apply_stage( + my_env=my_env, repo_path=repo_path, stage=stage, skip_inventory=True + ) + if returncode != 0: + LOGGER.error(f"Update failed for project {repo_path}.") + break + + self._persistence.insert_deployment( + deployment_id=deployment_id, + status="UPDATED" if returncode == 0 else "FAILED", + stdout=stdout, + stderr=stderr, + ) + LOGGER.info(f"Update completed for project {repo_path}") + return returncode + except ( + binascii.Error, + BadZipFile, + CalledProcessError, + FileNotFoundError, + NameError, + KeyError, + ) as e: + LOGGER.exception(e) + self._persistence.insert_deployment( + deployment_id=deployment_id, status="ERROR", stdout=None, stderr=None + ) + raise e diff --git a/iem-api/src/core/persistence.py b/iem-api/src/core/persistence.py index 2f4979e53ff3085ed75c5d8bf5e6ab17a120314b..10c3a2d1b95058943cff2168328f1e057547376a 100644 --- a/iem-api/src/core/persistence.py +++ b/iem-api/src/core/persistence.py @@ -1,91 +1,21 @@ +import logging import os -import sqlite3 -# from dataclasses import dataclass -from datetime import datetime from ratelimiter import RateLimiter +from sqlalchemy import (Column, DateTime, Integer, String, create_engine, desc, + func, select) +from sqlalchemy.orm import Session, declarative_base -from sqlalchemy import create_engine -from sqlalchemy import Column -from sqlalchemy import DateTime, Integer, String -from sqlalchemy import select, desc -from sqlalchemy.orm import declarative_base, Session +LOGGER = logging.getLogger(__name__) Base = declarative_base() -class Sqlite: - def __init__(self): - self._db_name = "iem.db" - - # create database if it does not exist - if not os.path.isfile(self._db_name): - self.__create_database() - - def __create_database(self): - conn = sqlite3.connect(self._db_name) - with conn: - sql = """CREATE TABLE deployments (id INTEGER PRIMARY KEY AUTOINCREMENT, - status_time DATETIME DEFAULT CURRENT_TIMESTAMP, - deployment_id TEXT NOT NULL, status TEXT NOT NULL, stdout TEXT, stderr TEXT);""" - conn.execute(sql) - - def insert_deployment( - self, deployment_id: str, status: str, stdout: str, stderr: str - ): - conn = sqlite3.connect(self._db_name) - with conn: - sql = """INSERT INTO deployments (deployment_id, status, stdout, stderr) VALUES (?,?,?,?)""" - - cursor = conn.cursor() - cursor.execute( - sql, - ( - deployment_id, - status, - stdout, - stderr, - ), - ) - conn.commit() - - def get_deployment(self, deployment_id: str): - conn = sqlite3.connect(self._db_name) - with conn: - sql = """SELECT status_time, deployment_id, status, stdout, stderr FROM deployments - WHERE deployment_id=? 
ORDER BY id DESC LIMIT 1""" - - cursor = conn.cursor() - cursor.execute(sql, (deployment_id,)) - row = cursor.fetchone() - - return row - - def get_all_deployments(self): - conn = sqlite3.connect(self._db_name) - with conn: - sql = """SELECT status_time, deployment_id, status, stdout, stderr - FROM deployments ORDER BY id DESC LIMIT 25 OFFSET 0""" - - cursor = conn.cursor() - cursor.execute(sql) - rows = cursor.fetchall() - - return rows - - @RateLimiter(max_calls=10, period=1) - def valid_api_key(self, api_key_query: str): - if api_key_query == os.getenv("API_KEY"): - return True - else: - return False - - class Deployment(Base): __tablename__ = "deployments" id = Column(Integer, primary_key=True) - status_time = Column(DateTime, default=datetime.now()) + status_time = Column(DateTime, default=func.now()) deployment_id = Column(String, nullable=False) status = Column(String, nullable=False) stdout = Column(String) @@ -96,8 +26,8 @@ class Deployment(Base): class Persistence: - def __init__(self): - self._engine = create_engine("sqlite:///:memory:", future=True) + def __init__(self, engine_url="sqlite:///db/iem.db"): + self._engine = create_engine(url=engine_url, future=True) Base.metadata.create_all(self._engine) def insert_deployment( diff --git a/iem-api/src/core/utils.py b/iem-api/src/core/utils.py index 5ec5df2b1914207333f99027ccb25371cea6b9ea..4c4bcf1640c60a80e997eea14eae19bd232f840e 100644 --- a/iem-api/src/core/utils.py +++ b/iem-api/src/core/utils.py @@ -1,10 +1,7 @@ -import logging - from datetime import datetime -from pydantic import BaseModel -from typing import Dict, Optional +from typing import Any, Dict, Optional -LOGGER = logging.getLogger("iem") +from pydantic import BaseModel class BaseResponse(BaseModel): @@ -22,6 +19,7 @@ class DeploymentResponse(BaseModel): class Aws(BaseModel): access_key_id: str secret_access_key: str + region: Optional[str] = "us-west-2" class Azure(BaseModel): @@ -42,19 +40,46 @@ class Openstack(BaseModel): user_domain_name: Optional[str] +class Docker(BaseModel): + server: str + user_name: str + password: str + + +class Vmware(BaseModel): + user_name: str + password: str + server: str + allow_unverified_ssl: Optional[str] + + class Credentials(BaseModel): aws: Optional[Aws] = None azure: Optional[Azure] = None openstack: Optional[Openstack] = None + vmware: Optional[Vmware] = None + docker: Optional[Docker] = None + custom: Optional[dict[str, Any]] = None + + +class Bundle(BaseModel): + base64: str class DeploymentRequest(BaseModel): deployment_id: str - repository: str - commit: str credentials: Credentials + bundle: Bundle + + +class SelfHealingRequest(BaseModel): + credentials: Credentials + playbook: str class DeleteDeploymentRequest(BaseModel): deployment_id: str credentials: Credentials + +class DeploymentStatusRequest(BaseModel): + credentials: Credentials diff --git a/iem-api/src/resources/id_iem b/iem-api/src/resources/id_iem new file mode 100644 index 0000000000000000000000000000000000000000..2f5c3078d02d507c7dd8a41c16ce3a24b897dac3 --- /dev/null +++ b/iem-api/src/resources/id_iem @@ -0,0 +1,38 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABlwAAAAdzc2gtcn +NhAAAAAwEAAQAAAYEA1d1XmDw9VikKatj87TVENCAJ5uCQpgjyaQi3HpvZDtw7zW1QTeUN +CRdJ36oHPG9cD+7gR9W9xvI2l+2bKugqTbuDr1ysPxAcTt6XYgMer9JkWTCwcdqnAXdUwf +dLMUPo7mOv/9OnOdjoEo0NwnsvGvPxinbUzMUSofqwLgHIdlo3GQZk2T3vvRN3zoYmx4OV +AN7C4RXAD7hLhEEW0wwUrAqnQqh13yHuDYi6iWFSzh5B7h3700Wi0Toe85n9tBZiD+7NYG 
+FNxqQykYGXrqNmTPe/dTffAdCYEp3qW8dvzA7htlrwBcByCu1kjVcRrsXb17KVFnwxmpvx +uGRrsc2sVR7cGIEssTaAFF3L/NhkvzDS16qX2qbz0fyxnFgBHINRYM45AvMuDAWerUA6MA +GK/wRJ1CO8ZrSJtIsQVnXdsxrcjKEm76hzdA0rETPmA5A+FzHH3fSqfZIiFh5dTfUUnldw +9STXwxjRgBNasptuTYtM1jW0shFMaS5F4+E9sBAzAAAFkHZrcNl2a3DZAAAAB3NzaC1yc2 +EAAAGBANXdV5g8PVYpCmrY/O01RDQgCebgkKYI8mkItx6b2Q7cO81tUE3lDQkXSd+qBzxv +XA/u4EfVvcbyNpftmyroKk27g69crD8QHE7el2IDHq/SZFkwsHHapwF3VMH3SzFD6O5jr/ +/TpznY6BKNDcJ7Lxrz8Yp21MzFEqH6sC4ByHZaNxkGZNk9770Td86GJseDlQDewuEVwA+4 +S4RBFtMMFKwKp0Kodd8h7g2IuolhUs4eQe4d+9NFotE6HvOZ/bQWYg/uzWBhTcakMpGBl6 +6jZkz3v3U33wHQmBKd6lvHb8wO4bZa8AXAcgrtZI1XEa7F29eylRZ8MZqb8bhka7HNrFUe +3BiBLLE2gBRdy/zYZL8w0teql9qm89H8sZxYARyDUWDOOQLzLgwFnq1AOjABiv8ESdQjvG +a0ibSLEFZ13bMa3IyhJu+oc3QNKxEz5gOQPhcxx930qn2SIhYeXU31FJ5XcPUk18MY0YAT +WrKbbk2LTNY1tLIRTGkuRePhPbAQMwAAAAMBAAEAAAGAB5BynrHSwY9mDO1r1MADj4xqjT +34H8dFO63RPEXq4Xmsq9Fn+7lUQrQOKtkKtHqD2RRr3l6S/cxnXexLhrL7fBBb0gIHHZvm +RGvfEtplZXadkgIE26IOMiEUYF/syutJ+9SOzw+fZI5ldvKCQBS3T869Bla5pBx8UjpZrO +bnPjhmpn3xZzWnmxprLGTWTkw7IvK+FdP9HRE5qo3aztAokwU1cUggEypSDyx83IsSsLOl +RVTOKWTXI2tY2OjjblE0SiFimH7btHH2VhdAWEtOujs04cKU1mQ7pPnZS0uK5PfkC89I7v +zeslnsMsZ73jtBMtCWnNwh3Om7saP4GK0G58pQqyC4SAN/4Clf6PGB+/dtQsJG1XDQ+Idc +IjlSly43tX6Q3t+M7VtfGbLYy/vUF01r2PaG5G+BUucH11DQrGxw/gMQhpqHaXu5C7PpxG +4yBxF58BaMqRPPTJWgTFuMw1Ib2Ogda5dRuC6Qi2hJ3T2S2IkFXDwGWabRGIpqKXMhAAAA +wChxJsE8SV5uxbYtgPKGFcHvZLqsX7Hg3q9GXDjqCiFmJCl2GpFnqacxqfhTlTGShHBB6q +O71zUOktX/Dbje7VTTBKhHLrpqsp7JXWao7v9w007LxhKAR3XEencoynmWTL9lGBvbQ8mX +X5Q4zqMt+1GqH5ae4BuT4BtlqD+R9WO4ZvnsEKUuCFXbndZlWsMaRD7J/PQXM/vrsDg0yF +9dHUH8INayw4utgQ/FwmdkFx6YAxnwqgcoYX0KzoX3FJhN+QAAAMEA+fiPdeB4tVLGLQyF +k/4SDjE1pyba0XUwl0hoc8IWILaiwmncn1uRLCbRSYZp+gikFheFY7zIzbdnZXPv4vpX8a +0ydCdksE9iwPxuhzMb5EbyoWCffUuDmiWaVu5H3Q9rveJwFPB5HJXmT4S0ZTS5JZHZi6nb +h55CTKFAoCd9pUBEZUATWUBfml+WurF72C9VzxiSqyd9S8XVwE2Oq72sxyI3S4VUGKvJ+t +wZZFU1BE7qoJWyV0RiV3eiez9Mum0rAAAAwQDbBdgafpny35V4Yc5YFr+k1FSv3Xw9fduH ++uqyskdHpB3anUe366ZsSpnQcLjbDFsW45K9tZxe8GacTwyiTfbdrT/m5zWzLV3cMBnVyv +Pf1oYSjrV4H1BDShikc5LCWGje+FWxONgtB2oOcUCQo7pPq/YYD984DsurBY9l1VWJM+T4 +U1KTMp9x9b+xoINP0+9uXZHLmL7LsxOGhygXhUCmE4XBCmJYZdEEodwlUco8RBraapFYqN +inyHJxviLktRkAAAAUdmFncmFudEB1YnVudHUtZm9jYWwBAgMEBQYH +-----END OPENSSH PRIVATE KEY----- diff --git a/iem-api/src/resources/id_iem.pub b/iem-api/src/resources/id_iem.pub new file mode 100644 index 0000000000000000000000000000000000000000..c4368a5642c47e33f57f5df4e56bd874aebcccad --- /dev/null +++ b/iem-api/src/resources/id_iem.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDV3VeYPD1WKQpq2PztNUQ0IAnm4JCmCPJpCLcem9kO3DvNbVBN5Q0JF0nfqgc8b1wP7uBH1b3G8jaX7Zsq6CpNu4OvXKw/EBxO3pdiAx6v0mRZMLBx2qcBd1TB90sxQ+juY6//06c52OgSjQ3Cey8a8/GKdtTMxRKh+rAuAch2WjcZBmTZPe+9E3fOhibHg5UA3sLhFcAPuEuEQRbTDBSsCqdCqHXfIe4NiLqJYVLOHkHuHfvTRaLROh7zmf20FmIP7s1gYU3GpDKRgZeuo2ZM9791N98B0JgSnepbx2/MDuG2WvAFwHIK7WSNVxGuxdvXspUWfDGam/G4ZGuxzaxVHtwYgSyxNoAUXcv82GS/MNLXqpfapvPR/LGcWAEcg1FgzjkC8y4MBZ6tQDowAYr/BEnUI7xmtIm0ixBWdd2zGtyMoSbvqHN0DSsRM+YDkD4XMcfd9Kp9kiIWHl1N9RSeV3D1JNfDGNGAE1qym25Ni0zWNbSyEUxpLkXj4T2wEDM= diff --git a/iem-api/tests/__init__.py b/iem-api/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/iem-api/tests/it/__init__.py b/iem-api/tests/it/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/iem-api/tests/it/test_it_iem.py b/iem-api/tests/it/test_it_iem.py new file mode 100644 index 
0000000000000000000000000000000000000000..529b41283a947ddd236f530b803eb22221ed0363 --- /dev/null +++ b/iem-api/tests/it/test_it_iem.py @@ -0,0 +1,149 @@ +import base64 +import logging +import os +import unittest +import uuid + +from src.core.iem import Iem +from src.core.utils import Aws, Credentials, Openstack + +LOGGER = logging.getLogger(__name__) + + +class TestIem(unittest.TestCase): + def __init__(self, *args, **kwargs): + super(TestIem, self).__init__(*args, **kwargs) + + # Check IEM_HOME variable + self._iem_home = os.environ["IEM_HOME"] + + @unittest.skipUnless(os.getenv("AWS"), "Define AWS variable to execute") + def test_deploy_destroy_aws(self): + + deployment_id = str(uuid.uuid4()) + a = Iem( + Credentials( + aws=Aws( + access_key_id=os.environ["AWS_ACCESS_KEY_ID"], + secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"], + ) + ) + ) + with open("tests/resources/aws.zip", "rb") as binary_file: + bundle = base64.b64encode(binary_file.read()) + + a.deploy(deployment_id=deployment_id, bundle=bundle) + + a.destroy(deployment_id=deployment_id) + + @unittest.skipUnless(os.getenv("OS"), "Define OS variable to execute") + def test_deploy_destroy_openstack(self): + + deployment_id = str(uuid.uuid4()) + a = Iem( + Credentials( + openstack=Openstack( + user_name=os.environ["OS_USERNAME"], + password=os.environ["OS_PASSWORD"], + auth_url=os.environ["OS_AUTH_URL"], + project_name=os.environ["OS_PROJECT_NAME"], + ) + ) + ) + with open("tests/resources/openstack.zip", "rb") as binary_file: + bundle = base64.b64encode(binary_file.read()) + + output = a.deploy(deployment_id=deployment_id, bundle=bundle) + + a.destroy(deployment_id=deployment_id) + + self.assertEqual(output, 0) + + @unittest.skipUnless(os.getenv("ANSIBLE"), "Define ANSIBLE variable to execute") + def test_deploy_destroy_ansible(self): + + deployment_id = str(uuid.uuid4()) + a = Iem( + Credentials( + openstack=Openstack( + user_name=os.environ["OS_USERNAME"], + password=os.environ["OS_PASSWORD"], + auth_url=os.environ["OS_AUTH_URL"], + project_name=os.environ["OS_PROJECT_NAME"], + ) + ) + ) + with open("tests/resources/ansible.zip", "rb") as binary_file: + bundle = base64.b64encode(binary_file.read()) + + output = a.deploy(deployment_id=deployment_id, bundle=bundle) + + a.destroy(deployment_id=deployment_id) + + self.assertEqual(output, 0) + + @unittest.skipUnless( + os.getenv("INTEGRATION"), "Define INTEGRATION variable to execute" + ) + def test_deploy_destroy_docker(self): + + deployment_id = str(uuid.uuid4()) + a = Iem( + Credentials( + openstack=Openstack( + user_name=os.environ["OS_USERNAME"], + password=os.environ["OS_PASSWORD"], + auth_url=os.environ["OS_AUTH_URL"], + project_name=os.environ["OS_PROJECT_NAME"], + ) + ) + ) + with open("tests/resources/docker.zip", "rb") as binary_file: + bundle = base64.b64encode(binary_file.read()) + + a.deploy(deployment_id=deployment_id, bundle=bundle) + + a.destroy(deployment_id=deployment_id) + + @unittest.skipUnless(os.getenv("SHS"), "Define SHS variable to execute") + def test_self_healing_strategy(self): + deployment_id = str(uuid.uuid4()) + a = Iem( + Credentials( + custom={ + "instance_usr": "vagrant", + "instance_pwd": "vagrant", + "instance_ip": "192.168.56.201", + } + ) + ) + with open("tests/resources/shs.zip", "rb") as binary_file: + bundle = base64.b64encode(binary_file.read()) + + a.deploy(deployment_id=deployment_id, bundle=bundle) + + a.self_healing_strategy(deployment_id=deployment_id, strategy="25") + + @unittest.skipUnless(os.getenv("SHS"), "Define SHS variable to 
execute") + def test_self_healing_bundle(self): + deployment_id = str(uuid.uuid4()) + a = Iem( + Credentials( + aws=Aws( + access_key_id=os.environ["AWS_ACCESS_KEY_ID"], + secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"], + ) + ) + ) + with open("tests/resources/shs.zip", "rb") as binary_file: + bundle = base64.b64encode(binary_file.read()) + + returncode = a.deploy(deployment_id=deployment_id, bundle=bundle) + self.assertEqual(returncode, 0) + + with open("tests/resources/shs-bundle.zip", "rb") as binary_file: + bundle = base64.b64encode(binary_file.read()) + returncode = a.self_healing_bundle(deployment_id=deployment_id, bundle=bundle) + self.assertEqual(returncode, 0) + + a.destroy(deployment_id=deployment_id) diff --git a/iem-api/tests/resources/ansible.zip b/iem-api/tests/resources/ansible.zip new file mode 100644 index 0000000000000000000000000000000000000000..7d2d24ddd670766af89958f489b8cfceff66c99a Binary files /dev/null and b/iem-api/tests/resources/ansible.zip differ diff --git a/iem-api/tests/resources/aws.zip b/iem-api/tests/resources/aws.zip new file mode 100644 index 0000000000000000000000000000000000000000..5813bd61d580f52708e4ed71dcb3a583fb107ddc Binary files /dev/null and b/iem-api/tests/resources/aws.zip differ diff --git a/iem-api/tests/resources/docker.zip b/iem-api/tests/resources/docker.zip new file mode 100644 index 0000000000000000000000000000000000000000..888efb0b9697997713ed9d800eb25fcf8bf989c9 Binary files /dev/null and b/iem-api/tests/resources/docker.zip differ diff --git a/iem-api/tests/resources/dummy.zip b/iem-api/tests/resources/dummy.zip new file mode 100644 index 0000000000000000000000000000000000000000..20d2bc346232701a6a840e0834e7709fdc67bab3 Binary files /dev/null and b/iem-api/tests/resources/dummy.zip differ diff --git a/iem-api/tests/resources/main.yml b/iem-api/tests/resources/main.yml new file mode 100644 index 0000000000000000000000000000000000000000..435d9f3d75b1f401d7b7d00741bacfa905f4a2f1 --- /dev/null +++ b/iem-api/tests/resources/main.yml @@ -0,0 +1,8 @@ +--- +- hosts: all + become: yes + tasks: + - name: install nmap + apt: + name: nmap + state: latest diff --git a/iem-api/tests/resources/openstack.zip b/iem-api/tests/resources/openstack.zip new file mode 100644 index 0000000000000000000000000000000000000000..ddadb660c3f9e3cbffc71126f543e8a9dd293689 Binary files /dev/null and b/iem-api/tests/resources/openstack.zip differ diff --git a/iem-api/tests/resources/shs-bundle.zip b/iem-api/tests/resources/shs-bundle.zip new file mode 100644 index 0000000000000000000000000000000000000000..afdb59146f1195c79fd82a8bd6f657ec1270b942 Binary files /dev/null and b/iem-api/tests/resources/shs-bundle.zip differ diff --git a/iem-api/tests/resources/shs.zip b/iem-api/tests/resources/shs.zip new file mode 100644 index 0000000000000000000000000000000000000000..0541812d6c84e6b1f5d87cdd0cef5255b9a6a16d Binary files /dev/null and b/iem-api/tests/resources/shs.zip differ diff --git a/iem-api/tests/unit/__init__.py b/iem-api/tests/unit/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/iem-api/tests/unit/test_iem.py b/iem-api/tests/unit/test_iem.py new file mode 100644 index 0000000000000000000000000000000000000000..97fa04dad9e5c134fc3c5ed2996bc7a5ebdf6746 --- /dev/null +++ b/iem-api/tests/unit/test_iem.py @@ -0,0 +1,31 @@ +import logging +import os +import unittest +import uuid + +from src.core.iem import Iem +from src.core.utils import Aws, Credentials, Openstack + +LOGGER = 
logging.getLogger(__name__) + + +class TestIem(unittest.TestCase): + def __init__(self, *args, **kwargs): + super(TestIem, self).__init__(*args, **kwargs) + + # Check IEM_HOME variable + self._iem_home = os.environ["IEM_HOME"] + + def test_get_all_deployments(self): + i = Iem() + all_deployments = i.get_all_deployments() + self.assertGreaterEqual(len(list(all_deployments)), 0) + + def test_get_deployment_ok(self): + i = Iem() + i.get_deployment(deployment_id="deployment2") + + def test_get_deployment_not_ok(self): + i = Iem() + deployment_id = str(uuid.uuid4()) + i.get_deployment(deployment_id=deployment_id) diff --git a/iem-api/tests/unit/test_main.py b/iem-api/tests/unit/test_main.py new file mode 100644 index 0000000000000000000000000000000000000000..fb4591d4de20d948477bacc3f5c8b123b57158e0 --- /dev/null +++ b/iem-api/tests/unit/test_main.py @@ -0,0 +1,224 @@ +import base64 +import os +import unittest +import uuid +from unittest.mock import Mock, patch + +from fastapi.testclient import TestClient +from main import app + + +class TestMain(unittest.TestCase): + def __init__(self, *args, **kwargs): + super(TestMain, self).__init__(*args, **kwargs) + + self.client = TestClient(app) + + # Check API_KEY variable + self._api_key = os.environ["API_KEY"] + + def test_root_no_apikey(self): + response = self.client.get("/") + assert response.status_code == 403 + + def test_root(self): + response = self.client.get("/", headers={"x-api-key": self._api_key}) + assert response.status_code == 200 + + def test_get_all_deployments(self): + response = self.client.get( + f"/deployments/", headers={"x-api-key": self._api_key} + ) + assert response.status_code == 200 + + def test_get_one_deployment(self): + response = self.client.get( + f"/deployments/25/", headers={"x-api-key": self._api_key} + ) + assert response.status_code == 200 + + @patch("subprocess.run") + def test_deployments(self, mock_run): + mock_run.return_value = Mock(returncode=0, stdout=b"{}", stderr=b"{}") + with open("tests/resources/dummy.zip", "rb") as binary_file: + bundle = base64.b64encode(binary_file.read()) + response = self.client.post( + f"/deployments/", + headers={"x-api-key": self._api_key}, + json={ + "deployment_id": str(uuid.uuid4()), + "credentials": { + "openstack": { + "user_name": "string", + "password": "string", + "auth_url": "string", + "project_name": "string", + "region_name": "string", + "domain_name": "string", + "project_domain_name": "string", + "user_domain_name": "string", + } + }, + "bundle": {"base64": bundle.decode("utf-8")}, + }, + ) + assert response.status_code == 201 + + @patch("subprocess.run") + def test_undeploy(self, mock_run): + mock_run.return_value = Mock(returncode=0, stdout=b"{}", stderr=b"{}") + + deployment_id = str(uuid.uuid4()) + + with open("tests/resources/dummy.zip", "rb") as binary_file: + bundle = base64.b64encode(binary_file.read()) + response = self.client.post( + f"/deployments/", + headers={"x-api-key": self._api_key}, + json={ + "deployment_id": deployment_id, + "credentials": { + "openstack": { + "user_name": "string", + "password": "string", + "auth_url": "string", + "project_name": "string", + "region_name": "string", + "domain_name": "string", + "project_domain_name": "string", + "user_domain_name": "string", + } + }, + "bundle": {"base64": bundle.decode("utf-8")}, + }, + ) + assert response.status_code == 201 + + response = self.client.post( + f"/undeploy/", + headers={"x-api-key": self._api_key}, + json={ + "deployment_id": deployment_id, + "credentials": {}, + }, + ) + 
assert response.status_code == 202 + + @patch("subprocess.run") + def test_custom(self, mock_run): + mock_run.return_value = Mock(returncode=0, stdout=b"{}", stderr=b"{}") + with open("tests/resources/dummy.zip", "rb") as binary_file: + bundle = base64.b64encode(binary_file.read()) + response = self.client.post( + f"/deployments/", + headers={"x-api-key": self._api_key}, + json={ + "deployment_id": str(uuid.uuid4()), + "credentials": { + "openstack": { + "user_name": "string", + "password": "string", + "auth_url": "string", + "project_name": "string", + "region_name": "string", + "domain_name": "string", + "project_domain_name": "string", + "user_domain_name": "string", + }, + "custom": { + "CUSTOM_VAR1": "string", + "CUSTOM_VAR2": "string", + "CUSTOM_VAR3": "string", + }, + }, + "bundle": {"base64": bundle.decode("utf-8")}, + }, + ) + assert response.status_code == 201 + + @patch("subprocess.run") + def test_self_healing_strategy(self, mock_run): + mock_run.return_value = Mock(returncode=0, stdout=b"{}", stderr=b"{}") + + deployment_id = str(uuid.uuid4()) + with open("tests/resources/shs.zip", "rb") as binary_file: + bundle = base64.b64encode(binary_file.read()) + + response = self.client.post( + f"/deployments/", + headers={"x-api-key": self._api_key}, + json={ + "deployment_id": deployment_id, + "credentials": {}, + "bundle": {"base64": bundle.decode("utf-8")}, + }, + ) + assert response.status_code == 201 + + response = self.client.post( + f"/self-healing/{25}", + headers={"x-api-key": self._api_key}, + json={ + "deployment_id": deployment_id, + "credentials": { + "custom": { + "instance_usr": "vagrant", + "instance_pwd": "vagrant", + "instance_ip": "192.168.56.201", + } + }, + }, + ) + assert response.status_code == 201 + + @patch("subprocess.run") + def test_self_healing_bundle(self, mock_run): + mock_run.return_value = Mock(returncode=0, stdout=b"{}", stderr=b"{}") + + deployment_id = str(uuid.uuid4()) + with open("tests/resources/dummy.zip", "rb") as binary_file: + bundle = base64.b64encode(binary_file.read()) + + response = self.client.post( + f"/deployments/", + headers={"x-api-key": self._api_key}, + json={ + "deployment_id": deployment_id, + "credentials": { + "openstack": { + "user_name": "string", + "password": "string", + "auth_url": "string", + "project_name": "string", + "region_name": "string", + "domain_name": "string", + "project_domain_name": "string", + "user_domain_name": "string", + } + }, + "bundle": {"base64": bundle.decode("utf-8")}, + }, + ) + assert response.status_code == 201 + + response = self.client.post( + f"/update-iac-bundle/", + headers={"x-api-key": self._api_key}, + json={ + "deployment_id": deployment_id, + "credentials": { + "openstack": { + "user_name": "string", + "password": "string", + "auth_url": "string", + "project_name": "string", + "region_name": "string", + "domain_name": "string", + "project_domain_name": "string", + "user_domain_name": "string", + } + }, + "bundle": {"base64": bundle.decode("utf-8")}, + }, + ) + assert response.status_code == 201 diff --git a/iem-api/tests/unit/test_persistence.py b/iem-api/tests/unit/test_persistence.py new file mode 100644 index 0000000000000000000000000000000000000000..6683e8b6e740dca89d0d0c98358ba7f0b4e23c87 --- /dev/null +++ b/iem-api/tests/unit/test_persistence.py @@ -0,0 +1,32 @@ +import unittest +import uuid + +from src.core.persistence import Persistence + + +class TestPersistence(unittest.TestCase): + @classmethod + def setUpClass(self): + self.engine_url = "sqlite:///:memory:" + + def 
test_insert_deployment(self): + Persistence(engine_url=self.engine_url).insert_deployment( + "deployment1", "STARTED", "stdout", "stderr" + ) + + def test_get_deployment(self): + p = Persistence(engine_url=self.engine_url) + p.insert_deployment("deployment2", "STOPPED", "stdout", "stderr") + p.insert_deployment("deployment2", "STARTED", "stdout", "stderr") + d = p.get_deployment("deployment2") + self.assertEqual(d.status, "STARTED") + + def test_get_deployment_not_ok(self): + row = Persistence(engine_url=self.engine_url).get_deployment(str(uuid.uuid4())) + self.assertIsNone(row) + + def test_get_all_deployments(self): + persistence = Persistence(engine_url=self.engine_url) + persistence.insert_deployment("deployment3", "STARTED", "stdout", "stderr") + rows = persistence.get_all_deployments() + self.assertGreaterEqual(len(list(rows)), 1) diff --git a/openapi.json b/openapi.json index 4727b5d0bb791f77d733001389fc2ba1cc28bb24..a127e229c149bb03e93828f47839fc17eb5a8b04 100644 --- a/openapi.json +++ b/openapi.json @@ -1 +1 @@ -{"openapi": "3.0.2", "info": {"title": "IaC Execution Manager", "description": "IaC Execution Manager", "version": "0.1.15"}, "paths": {"/": {"get": {"tags": ["greeting"], "summary": "Read Root", "operationId": "read_root__get", "responses": {"200": {"description": "Successful Response", "content": {"application/json": {"schema": {}}}}}, "security": [{"APIKeyHeader": []}]}}, "/deployments/": {"get": {"tags": ["deployments"], "summary": "Read Status", "operationId": "read_status_deployments__get", "parameters": [{"required": false, "schema": {"title": "Start", "type": "integer", "default": 0}, "name": "start", "in": "query"}, {"required": false, "schema": {"title": "Count", "type": "integer", "default": 25}, "name": "count", "in": "query"}, {"required": false, "schema": {"title": "Start Date", "type": "string", "default": "1970-01-01"}, "name": "start_date", "in": "query"}, {"required": false, "schema": {"title": "End Date", "type": "string", "default": "2100-01-01"}, "name": "end_date", "in": "query"}], "responses": {"200": {"description": "Successful Response", "content": {"application/json": {"schema": {"title": "Response Read Status Deployments Get", "type": "array", "items": {"$ref": "#/components/schemas/DeploymentResponse"}}}}}, "422": {"description": "Validation Error", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/HTTPValidationError"}}}}}, "security": [{"APIKeyHeader": []}]}, "post": {"tags": ["deployments"], "summary": "Deploy", "operationId": "deploy_deployments__post", "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/DeploymentRequest"}}}, "required": true}, "responses": {"201": {"description": "Successful Response", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/BaseResponse"}}}}, "422": {"description": "Validation Error", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/HTTPValidationError"}}}}}, "security": [{"APIKeyHeader": []}]}}, "/deployments/{deployment_id}": {"get": {"tags": ["deployments"], "summary": "Read Status Deployment", "operationId": "read_status_deployment_deployments__deployment_id__get", "parameters": [{"required": true, "schema": {"title": "Deployment Id", "type": "string"}, "name": "deployment_id", "in": "path"}, {"required": false, "schema": {"title": "Start", "type": "integer", "default": 0}, "name": "start", "in": "query"}, {"required": false, "schema": {"title": "Count", "type": "integer", "default": 1}, "name": 
"count", "in": "query"}], "responses": {"200": {"description": "Successful Response", "content": {"application/json": {"schema": {"title": "Response Read Status Deployment Deployments Deployment Id Get", "type": "array", "items": {"$ref": "#/components/schemas/DeploymentResponse"}}}}}, "422": {"description": "Validation Error", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/HTTPValidationError"}}}}}, "security": [{"APIKeyHeader": []}]}}, "/undeploy/": {"post": {"tags": ["deployments"], "summary": "Undeploy", "operationId": "undeploy_undeploy__post", "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/DeleteDeploymentRequest"}}}, "required": true}, "responses": {"202": {"description": "Successful Response", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/BaseResponse"}}}}, "422": {"description": "Validation Error", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/HTTPValidationError"}}}}}, "security": [{"APIKeyHeader": []}]}}}, "components": {"schemas": {"Aws": {"title": "Aws", "required": ["access_key_id", "secret_access_key"], "type": "object", "properties": {"access_key_id": {"title": "Access Key Id", "type": "string"}, "secret_access_key": {"title": "Secret Access Key", "type": "string"}}}, "Azure": {"title": "Azure", "required": ["arm_client_id", "arm_client_secret", "arm_subscription_id", "arm_tenant_id"], "type": "object", "properties": {"arm_client_id": {"title": "Arm Client Id", "type": "string"}, "arm_client_secret": {"title": "Arm Client Secret", "type": "string"}, "arm_subscription_id": {"title": "Arm Subscription Id", "type": "string"}, "arm_tenant_id": {"title": "Arm Tenant Id", "type": "string"}}}, "BaseResponse": {"title": "BaseResponse", "required": ["message"], "type": "object", "properties": {"message": {"title": "Message", "type": "string"}}}, "Credentials": {"title": "Credentials", "type": "object", "properties": {"aws": {"$ref": "#/components/schemas/Aws"}, "azure": {"$ref": "#/components/schemas/Azure"}, "openstack": {"$ref": "#/components/schemas/Openstack"}}}, "DeleteDeploymentRequest": {"title": "DeleteDeploymentRequest", "required": ["deployment_id", "credentials"], "type": "object", "properties": {"deployment_id": {"title": "Deployment Id", "type": "string"}, "credentials": {"$ref": "#/components/schemas/Credentials"}}}, "DeploymentRequest": {"title": "DeploymentRequest", "required": ["deployment_id", "repository", "commit", "credentials"], "type": "object", "properties": {"deployment_id": {"title": "Deployment Id", "type": "string"}, "repository": {"title": "Repository", "type": "string"}, "commit": {"title": "Commit", "type": "string"}, "credentials": {"$ref": "#/components/schemas/Credentials"}}}, "DeploymentResponse": {"title": "DeploymentResponse", "required": ["status_time", "deployment_id", "status"], "type": "object", "properties": {"status_time": {"title": "Status Time", "type": "string", "format": "date-time"}, "deployment_id": {"title": "Deployment Id", "type": "string"}, "status": {"title": "Status", "type": "string"}, "stdout": {"title": "Stdout", "type": "string"}, "stderr": {"title": "Stderr", "type": "string"}}}, "HTTPValidationError": {"title": "HTTPValidationError", "type": "object", "properties": {"detail": {"title": "Detail", "type": "array", "items": {"$ref": "#/components/schemas/ValidationError"}}}}, "Openstack": {"title": "Openstack", "required": ["user_name", "password", "auth_url", "project_name"], "type": "object", 
"properties": {"user_name": {"title": "User Name", "type": "string"}, "password": {"title": "Password", "type": "string"}, "auth_url": {"title": "Auth Url", "type": "string"}, "project_name": {"title": "Project Name", "type": "string"}, "region_name": {"title": "Region Name", "type": "string"}, "domain_name": {"title": "Domain Name", "type": "string"}, "project_domain_name": {"title": "Project Domain Name", "type": "string"}, "user_domain_name": {"title": "User Domain Name", "type": "string"}}}, "ValidationError": {"title": "ValidationError", "required": ["loc", "msg", "type"], "type": "object", "properties": {"loc": {"title": "Location", "type": "array", "items": {"type": "string"}}, "msg": {"title": "Message", "type": "string"}, "type": {"title": "Error Type", "type": "string"}}}}, "securitySchemes": {"APIKeyHeader": {"type": "apiKey", "in": "header", "name": "x-api-key"}}}} \ No newline at end of file +{"openapi": "3.0.2", "info": {"title": "IaC Execution Manager", "description": "IaC Execution Manager", "version": "3.0.1.17"}, "paths": {"/": {"get": {"tags": ["greeting"], "summary": "Read Root", "operationId": "read_root__get", "responses": {"200": {"description": "Successful Response", "content": {"application/json": {"schema": {}}}}}, "security": [{"APIKeyHeader": []}]}}, "/deployments/": {"get": {"tags": ["deployments"], "summary": "Read Status", "operationId": "read_status_deployments__get", "responses": {"200": {"description": "Successful Response", "content": {"application/json": {"schema": {"title": "Response Read Status Deployments Get", "type": "array", "items": {"$ref": "#/components/schemas/DeploymentResponse"}}}}}}, "security": [{"APIKeyHeader": []}]}, "post": {"tags": ["deployments"], "summary": "Deploy", "operationId": "deploy_deployments__post", "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/DeploymentRequest"}}}, "required": true}, "responses": {"201": {"description": "Successful Response", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/BaseResponse"}}}}, "422": {"description": "Validation Error", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/HTTPValidationError"}}}}}, "security": [{"APIKeyHeader": []}]}}, "/deployments/{deployment_id}": {"get": {"tags": ["deployments"], "summary": "Read Status Deployment", "operationId": "read_status_deployment_deployments__deployment_id__get", "parameters": [{"required": true, "schema": {"title": "Deployment Id", "type": "string"}, "name": "deployment_id", "in": "path"}], "responses": {"200": {"description": "Successful Response", "content": {"application/json": {"schema": {"title": "Response Read Status Deployment Deployments Deployment Id Get", "type": "array", "items": {"$ref": "#/components/schemas/DeploymentResponse"}}}}}, "422": {"description": "Validation Error", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/HTTPValidationError"}}}}}, "security": [{"APIKeyHeader": []}]}}, "/deployments/{deployment_id}/{stage_id}/outputs": {"get": {"tags": ["deployments"], "summary": "Read Deployment Outputs", "operationId": "read_deployment_outputs_deployments__deployment_id___stage_id__outputs_get", "parameters": [{"required": true, "schema": {"title": "Deployment Id", "type": "string"}, "name": "deployment_id", "in": "path"}, {"required": true, "schema": {"title": "Stage Id", "type": "string"}, "name": "stage_id", "in": "path"}], "responses": {"200": {"description": "Successful Response", "content": 
{"application/json": {"schema": {"title": "Response Read Deployment Outputs Deployments Deployment Id Stage Id Outputs Get", "type": "object"}}}}, "422": {"description": "Validation Error", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/HTTPValidationError"}}}}}, "security": [{"APIKeyHeader": []}]}}, "/undeploy/": {"post": {"tags": ["deployments"], "summary": "Undeploy", "operationId": "undeploy_undeploy__post", "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/DeleteDeploymentRequest"}}}, "required": true}, "responses": {"202": {"description": "Successful Response", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/BaseResponse"}}}}, "422": {"description": "Validation Error", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/HTTPValidationError"}}}}}, "security": [{"APIKeyHeader": []}]}}, "/deployments/{deployment_id}/self-healing": {"post": {"tags": ["deployments"], "summary": "Self Healing Strategy", "operationId": "self_healing_strategy_deployments__deployment_id__self_healing_post", "parameters": [{"required": true, "schema": {"title": "Deployment Id", "type": "string"}, "name": "deployment_id", "in": "path"}], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/SelfHealingRequest"}}}, "required": true}, "responses": {"201": {"description": "Successful Response", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/BaseResponse"}}}}, "422": {"description": "Validation Error", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/HTTPValidationError"}}}}}, "security": [{"APIKeyHeader": []}]}}, "/update-iac-bundle/": {"post": {"tags": ["deployments"], "summary": "Self Healing Bundle", "operationId": "self_healing_bundle_update_iac_bundle__post", "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/DeploymentRequest"}}}, "required": true}, "responses": {"201": {"description": "Successful Response", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/BaseResponse"}}}}, "422": {"description": "Validation Error", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/HTTPValidationError"}}}}}, "security": [{"APIKeyHeader": []}]}}}, "components": {"schemas": {"Aws": {"title": "Aws", "required": ["access_key_id", "secret_access_key"], "type": "object", "properties": {"access_key_id": {"title": "Access Key Id", "type": "string"}, "secret_access_key": {"title": "Secret Access Key", "type": "string"}, "region": {"title": "Region", "type": "string", "default": "us-west-2"}}}, "Azure": {"title": "Azure", "required": ["arm_client_id", "arm_client_secret", "arm_subscription_id", "arm_tenant_id"], "type": "object", "properties": {"arm_client_id": {"title": "Arm Client Id", "type": "string"}, "arm_client_secret": {"title": "Arm Client Secret", "type": "string"}, "arm_subscription_id": {"title": "Arm Subscription Id", "type": "string"}, "arm_tenant_id": {"title": "Arm Tenant Id", "type": "string"}}}, "BaseResponse": {"title": "BaseResponse", "required": ["message"], "type": "object", "properties": {"message": {"title": "Message", "type": "string"}}}, "Bundle": {"title": "Bundle", "required": ["base64"], "type": "object", "properties": {"base64": {"title": "Base64", "type": "string"}}}, "Credentials": {"title": "Credentials", "type": "object", "properties": {"aws": {"$ref": "#/components/schemas/Aws"}, "azure": {"$ref": 
"#/components/schemas/Azure"}, "openstack": {"$ref": "#/components/schemas/Openstack"}, "vmware": {"$ref": "#/components/schemas/Vmware"}, "docker": {"$ref": "#/components/schemas/Docker"}, "custom": {"title": "Custom", "type": "object"}}}, "DeleteDeploymentRequest": {"title": "DeleteDeploymentRequest", "required": ["deployment_id", "credentials"], "type": "object", "properties": {"deployment_id": {"title": "Deployment Id", "type": "string"}, "credentials": {"$ref": "#/components/schemas/Credentials"}}}, "DeploymentRequest": {"title": "DeploymentRequest", "required": ["deployment_id", "credentials", "bundle"], "type": "object", "properties": {"deployment_id": {"title": "Deployment Id", "type": "string"}, "credentials": {"$ref": "#/components/schemas/Credentials"}, "bundle": {"$ref": "#/components/schemas/Bundle"}}}, "DeploymentResponse": {"title": "DeploymentResponse", "required": ["status_time", "deployment_id", "status"], "type": "object", "properties": {"status_time": {"title": "Status Time", "type": "string", "format": "date-time"}, "deployment_id": {"title": "Deployment Id", "type": "string"}, "status": {"title": "Status", "type": "string"}, "stdout": {"title": "Stdout", "type": "string"}, "stderr": {"title": "Stderr", "type": "string"}}}, "Docker": {"title": "Docker", "required": ["server", "user_name", "password"], "type": "object", "properties": {"server": {"title": "Server", "type": "string"}, "user_name": {"title": "User Name", "type": "string"}, "password": {"title": "Password", "type": "string"}}}, "HTTPValidationError": {"title": "HTTPValidationError", "type": "object", "properties": {"detail": {"title": "Detail", "type": "array", "items": {"$ref": "#/components/schemas/ValidationError"}}}}, "Openstack": {"title": "Openstack", "required": ["user_name", "password", "auth_url", "project_name"], "type": "object", "properties": {"user_name": {"title": "User Name", "type": "string"}, "password": {"title": "Password", "type": "string"}, "auth_url": {"title": "Auth Url", "type": "string"}, "project_name": {"title": "Project Name", "type": "string"}, "region_name": {"title": "Region Name", "type": "string"}, "domain_name": {"title": "Domain Name", "type": "string"}, "project_domain_name": {"title": "Project Domain Name", "type": "string"}, "user_domain_name": {"title": "User Domain Name", "type": "string"}}}, "SelfHealingRequest": {"title": "SelfHealingRequest", "required": ["credentials", "playbook"], "type": "object", "properties": {"credentials": {"$ref": "#/components/schemas/Credentials"}, "playbook": {"title": "Playbook", "type": "string"}}}, "ValidationError": {"title": "ValidationError", "required": ["loc", "msg", "type"], "type": "object", "properties": {"loc": {"title": "Location", "type": "array", "items": {"type": "string"}}, "msg": {"title": "Message", "type": "string"}, "type": {"title": "Error Type", "type": "string"}}}, "Vmware": {"title": "Vmware", "required": ["user_name", "password", "server"], "type": "object", "properties": {"user_name": {"title": "User Name", "type": "string"}, "password": {"title": "Password", "type": "string"}, "server": {"title": "Server", "type": "string"}, "allow_unverified_ssl": {"title": "Allow Unverified Ssl", "type": "string"}}}}, "securitySchemes": {"APIKeyHeader": {"type": "apiKey", "in": "header", "name": "x-api-key"}}}} \ No newline at end of file diff --git a/sonar-project.properties b/sonar-project.properties new file mode 100644 index 0000000000000000000000000000000000000000..9f5f8e01990a469c544513db1bf32a8726f0ea71 --- /dev/null 
+++ b/sonar-project.properties
@@ -0,0 +1,5 @@
+sonar.projectKey=piacere_private_t51-iem_AXlg6OYJGykB3kuTt_u4
+sonar.qualitygate.wait=true
+sonar.sources=iem-api/src
+sonar.python.coverage.reportPaths=iem-api/coverage.xml
+
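The regenerated openapi.json above adds the bundle-based DeploymentRequest, the extra provider credential schemas, and the x-api-key security scheme. A minimal client sketch against those endpoints, assuming the IEM is reachable at http://localhost:8000 and that the `requests` package is available (neither is mandated by this change; `bundle.zip` is a hypothetical IaC bundle):

```python
import base64
import os
import uuid

import requests  # assumed HTTP client; any client library works

API = "http://localhost:8000"  # assumed local instance of the IEM API
HEADERS = {"x-api-key": os.environ["API_KEY"]}

# Bundles travel as base64-encoded zip archives, per the Bundle schema.
with open("bundle.zip", "rb") as bundle_file:
    bundle = base64.b64encode(bundle_file.read()).decode("utf-8")

deployment_id = str(uuid.uuid4())

# POST /deployments/ with a DeploymentRequest (deployment_id, credentials, bundle).
response = requests.post(
    f"{API}/deployments/",
    headers=HEADERS,
    json={
        "deployment_id": deployment_id,
        "credentials": {
            "aws": {"access_key_id": "...", "secret_access_key": "..."}
        },
        "bundle": {"base64": bundle},
    },
)
assert response.status_code == 201

# GET /deployments/{deployment_id} returns a list of DeploymentResponse records.
print(requests.get(f"{API}/deployments/{deployment_id}", headers=HEADERS).json())
```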