Commit 8b0280d4 authored by Debora Benedetto

refactoring

parent 121bdd0e
import logging
import json
import tarfile
import uuid
from fastapi import APIRouter, Body
from fastapi.responses import FileResponse
from controller.PluginOrchestrator import create_infrastructure_files
from icgparser import ModelParser
from controller import Orchestrator
api_router = APIRouter()
@@ -16,58 +11,16 @@ base_compress_file_name = "iac_files_"
@api_router.post("/infrastructure/files")
def create_iac_from_intermediate_representation(intermediate_representation: dict = Body(...)):
logging.info("Received intermediate representation create_iac_from_intermediate_representation request")
template_generated_folder = create_infrastructure_files(intermediate_representation)
compress_file_name = random_file_name_generation(base_compress_file_name)
compress_file_path = compress_file(template_generated_folder, compress_file_name)
return FileResponse(compress_file_path, media_type='application/octet-stream', filename=compress_file_name)
    # The orchestrator returns the folder holding the generated templates; compress it before streaming it back.
    template_generated_folder = Orchestrator.create_iac_from_intermediate_representation(intermediate_representation)
    compress_folder_info = Orchestrator.compress_iac_folder(template_generated_folder)
    return FileResponse(compress_folder_info.file_path, media_type='application/octet-stream',
                        filename=compress_folder_info.filename)
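A minimal client-side sketch of exercising this endpoint, assuming the API is served locally on port 8000 and that a previously generated intermediate representation is available on disk (the host/port and the use of the requests library are assumptions, not part of this repository):
import json
import requests  # assumed to be installed; not a dependency declared by this project

# Load a previously generated intermediate representation (path mirrors input_file_generated/ir.json used elsewhere).
with open("input_file_generated/ir.json") as ir_file:
    intermediate_representation = json.load(ir_file)

# POST it to /infrastructure/files and save the returned tar.gz archive.
response = requests.post("http://localhost:8000/infrastructure/files",
                         json=intermediate_representation)
response.raise_for_status()
with open("iac_files.tar.gz", "wb") as archive:
    archive.write(response.content)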
@api_router.post("/iac/files")
def create_iac_from_doml(data: str = Body(..., media_type="application/xml")):
logging.info("Received create_iac_from_doml request")
temp_model_file_path = "icgparser/doml/nginx-openstack.domlx"
logging.info("Writing model file in temp folder '%s' for parsing", temp_model_file_path)
f = open(temp_model_file_path, "w")
f.write(data)
f.close()
intermediate_representation = ModelParser.parse_model(model_path=temp_model_file_path)
intermediate_representation = reorganize_info(intermediate_representation)
save(intermediate_representation, "input_file_generated/ir.json")
template_generated_folder = create_infrastructure_files(intermediate_representation)
compress_file_name = random_file_name_generation(base_compress_file_name)
compress_file_folder = compress_file(template_generated_folder, compress_file_name)
return FileResponse(compress_file_folder,
media_type='application/octet-stream',
filename=compress_file_name)
def random_file_name_generation(base_name):
return base_name + str(uuid.uuid4().hex) + ".tar.gz"
def compress_file(source_folder, dest_file_name):
# prefix_path = "/opt/"
prefix_path = ""
logging.info("Compressing folder %s into destination %s", prefix_path + source_folder,
prefix_path + dest_file_name)
with tarfile.open(prefix_path + dest_file_name, "w:gz") as tar:
tar.add(source_folder, arcname='.')
return prefix_path + dest_file_name
def save(data, file_path):
file = open(file_path, "w")
if isinstance(data, dict):
data = json.dumps(data, indent=2, sort_keys=True)
print(data)
file.write(data)
file.close()
def reorganize_info(intermediate_repr):
computing_group_list = []
groups = intermediate_repr["steps"][0]["data"]["computingGroup"][0]
for key in groups:
if not key == "name":
computing_group_list.append(groups[key])
intermediate_repr["steps"][0]["data"]["computingGroup"] = computing_group_list
return intermediate_repr
    compress_folder_info = Orchestrator.create_iac_from_doml(model=data, metamodel_directory="icgparser/doml",
                                                             is_multiecore_metamodel=False)
    logging.info(f"file_path: {compress_folder_info.file_path}, filename: {compress_folder_info.filename}")
    return FileResponse(path=compress_folder_info.file_path, media_type='application/octet-stream',
                        filename=compress_folder_info.filename)
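For the DOML endpoint, a similar hedged sketch that posts the raw DOMLX model as the request body (the file name and server address are illustrative):
import requests  # assumed to be installed

with open("nginx-openstack.domlx") as model_file:
    model_xml = model_file.read()

# /iac/files expects the model as an application/xml body and streams back the compressed iac folder.
response = requests.post("http://localhost:8000/iac/files",
                         data=model_xml,
                         headers={"Content-Type": "application/xml"})
response.raise_for_status()
with open("iac_files.tar.gz", "wb") as archive:
    archive.write(response.content)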
import json
import logging
import tarfile
import uuid
from icgparser import ModelParser
from plugin import AnsiblePlugin, TerraformPlugin
class CompressFolder:
def __init__(self, file_path, filename):
        self.file_path = file_path
self.filename = filename
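CompressFolder is a plain value object; an equivalent dataclass-based sketch (an alternative style, not what this commit uses) makes the two fields and their types explicit:
from dataclasses import dataclass

@dataclass
class CompressFolderInfo:  # hypothetical name, chosen to avoid clashing with the class above
    file_path: str
    filename: str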
def create_infrastructure_files(intermediate_representation: dict):
template_generated_folder = intermediate_representation["output_path"]
choose_plugin(intermediate_representation, template_generated_folder)
logging.info("iac files available at %s", template_generated_folder)
return template_generated_folder
def choose_plugin(parameters, template_generated_folder):
# os.system('rm -f /opt/output_files_generated/*')
logging.info("Choosing plugin")
for step in parameters["steps"]:
if step["programming_language"] == "ansible":
logging.info("Ansible Plugin chosen")
input_data = step["data"]
AnsiblePlugin.create_files(input_data, template_generated_folder)
elif step["programming_language"] == "terraform":
logging.info("Terraform Plugin chosen")
input_data = step["data"]
TerraformPlugin.create_files(input_data, template_generated_folder)
def create_temp_file_for_model(model, output_folder):
logging.info(f"Writing model file in temp folder at {output_folder} for parsing")
def save_file(data, file_path):
    logging.info(f"Saving data at: {file_path}")
    if isinstance(data, dict):
        data = json.dumps(data, indent=2, sort_keys=True)
    # Write through a context manager so the file is closed even if the write fails.
    with open(file_path, "w") as file:
        file.write(data)
def reorganize_info(intermediate_repr):
logging.info("Reorganizing intermediate representation")
computing_group_list = []
if "computingGroup" in intermediate_repr["steps"][0]["data"].keys():
groups = intermediate_repr["steps"][0]["data"]["computingGroup"][0]
for key in groups:
if not key == "name":
computing_group_list.append(groups[key])
intermediate_repr["steps"][0]["data"]["computingGroup"] = computing_group_list
return intermediate_repr
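To illustrate the reshaping reorganize_info performs, a small worked example with illustrative data shaped like the computingGroup entries in the generated ir.json:
example_ir = {"steps": [{"data": {"computingGroup": [
    {"name": "sg", "http": {"fromPort": 80, "toPort": 80}, "ssh": {"fromPort": 22, "toPort": 22}}]}}]}
reorganized = reorganize_info(example_ir)
# computingGroup is now the flat list of rule dicts, with the "name" key dropped:
# [{"fromPort": 80, "toPort": 80}, {"fromPort": 22, "toPort": 22}]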
def random_file_name_generation(base_name):
return base_name + str(uuid.uuid4().hex) + ".tar.gz"
def compress_file(source_folder, dest_file_name):
# prefix_path = "/opt/"
prefix_path = ""
    folder_path = prefix_path + dest_file_name
logging.info(f"Compressing folder {source_folder} into destination {folder_path}")
with tarfile.open(folder_path, "w:gz") as tar:
tar.add(source_folder, arcname='.')
return folder_path
def create_temp_model_file(model_xml):
logging.info("Saving model in temp file")
temp_model_file_path = "icgparser/doml/nginx-openstack.domlx"
save_file(model_xml, temp_model_file_path)
logging.info(f"Successfully saved model in temp file at {temp_model_file_path}")
return temp_model_file_path
def create_intermediate_representation(model_path, is_multiecore_metamodel, metamodel_directory):
logging.info("Calling ICG Parser for creating intermediate representation")
intermediate_representation = ModelParser.parse_model(model_path=model_path,
is_multiecore_metamodel=is_multiecore_metamodel,
metamodel_directory=metamodel_directory)
intermediate_representation = reorganize_info(intermediate_representation)
logging.info("Successfully created intermediate representation")
intermediate_representation_path = "input_file_generated/ir.json"
save_file(intermediate_representation, intermediate_representation_path)
logging.info(f"Saved intermediate representation at {intermediate_representation_path}")
return intermediate_representation
def compress_iac_folder(template_generated_folder):
base_compress_file_name = "iac_files_"
compress_file_name = random_file_name_generation(base_compress_file_name)
compress_file_folder_path = compress_file(template_generated_folder, compress_file_name)
logging.info(f"Successfully created iac files, available at {compress_file_folder_path}")
compress_folder_info = CompressFolder(file_path=compress_file_folder_path, filename=compress_file_name)
logging.info(f"######################### {compress_folder_info.file_path}") ## TODO fix, is tuple instead of string
return compress_folder_info
def create_iac_from_intermediate_representation(intermediate_representation):
logging.info("Creating iac files")
template_generated_folder = create_infrastructure_files(intermediate_representation)
return template_generated_folder
def create_iac_from_doml(model, is_multiecore_metamodel, metamodel_directory):
logging.info("Creating iac files: parse and plugins will be called")
model_path = create_temp_model_file(model_xml=model)
intermediate_representation = create_intermediate_representation(model_path, is_multiecore_metamodel,
metamodel_directory)
template_generated_folder = create_iac_from_intermediate_representation(intermediate_representation)
compress_folder_info = compress_iac_folder(template_generated_folder)
return compress_folder_info
def create_iac_from_doml_path(model_path, is_multiecore_metamodel, metamodel_directory):
intermediate_representation = create_intermediate_representation(model_path, is_multiecore_metamodel,
metamodel_directory)
template_generated_folder = create_iac_from_intermediate_representation(intermediate_representation)
compress_folder_info = compress_iac_folder(template_generated_folder)
return compress_folder_info
import logging
from plugin import AnsiblePlugin, TerraformPlugin
def create_infrastructure_files(intermediate_representation: dict):
template_generated_folder = intermediate_representation["output_path"]
choose_plugin(intermediate_representation, template_generated_folder)
logging.info("iac files available at %s", template_generated_folder)
return template_generated_folder
def choose_plugin(parameters, template_generated_folder):
# os.system('rm -f /opt/output_files_generated/*')
logging.info("Choosing plugin")
for step in parameters["steps"]:
if step["programming_language"] == "ansible":
logging.info("Ansible Plugin chosen")
input_data = step["data"]
AnsiblePlugin.create_files(input_data, template_generated_folder)
elif step["programming_language"] == "terraform":
logging.info("Terraform Plugin chosen")
input_data = step["data"]
TerraformPlugin.create_files(input_data, template_generated_folder)
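As a design note, the if/elif chain in choose_plugin could also be expressed as a dispatch table keyed by programming_language; a sketch relying on the same imports as this module (the table and function names are illustrative):
# Maps a step's programming_language to the plugin entry point (names are illustrative).
PLUGIN_DISPATCH = {
    "ansible": AnsiblePlugin.create_files,
    "terraform": TerraformPlugin.create_files,
}

def choose_plugin_via_table(parameters, template_generated_folder):
    for step in parameters["steps"]:
        create_files = PLUGIN_DISPATCH.get(step["programming_language"])
        if create_files is None:
            logging.warning("No plugin registered for language %s", step["programming_language"])
            continue
        logging.info("%s plugin chosen", step["programming_language"].capitalize())
        create_files(step["data"], template_generated_folder)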
@@ -88,4 +88,5 @@ def parse_model(model_path, is_multiecore_metamodel, metamodel_directory):
rset = DomlParserUtilities.load_metamodel(metamodel_directory=metamodel_directory,
is_multiecore=is_multiecore_metamodel)
doml_model = DomlParserUtilities.load_model(model_path, rset)
create_intermediate_representation(doml_model)
intermediate_representation = create_intermediate_representation(doml_model)
return intermediate_representation
@@ -3,48 +3,11 @@
"steps": [
{
"data": {
"computingGroup": [
"credentials": [
{
"addressRanges": [
"0.0.0.0/0",
"::/0"
],
"kind": "EGRESS",
"name": "out_all",
"protocol": "-1"
},
{
"addressRanges": [
"0.0.0.0/0",
"::/0"
],
"fromPort": 80,
"kind": "INGRESS",
"name": "http",
"protocol": "tcp",
"toPort": 80
},
{
"addressRanges": [
"0.0.0.0/0",
"::/0"
],
"fromPort": 443,
"kind": "INGRESS",
"name": "https",
"protocol": "tcp",
"toPort": 443
},
{
"addressRanges": [
"0.0.0.0/0",
"::/0"
],
"fromPort": 22,
"kind": "INGRESS",
"name": "ssh",
"protocol": "tcp",
"toPort": 22
"algorithm": "RSA",
"bits": 4096,
"name": "ssh_key"
}
],
"networks": [
......
import json
import logging
import sys
from fastapi import FastAPI
import api.InfrastructureTemplateController
from api.InfrastructureTemplateController import compress_file
from controller.PluginOrchestrator import create_infrastructure_files
from icgparser import ModelParser, ModelPrinter
from controller import Orchestrator
from icgparser import ModelPrinter
fast_api = FastAPI()
@@ -23,6 +19,7 @@ model_filename = "./nginx-openstack_v2_multiecores.domlx"
load_split_model = True
output_file_name = "iac_files.tar.gz"
# get metamodel directory from command line
def param_dir(pos, list):
global doml_directory
@@ -31,17 +28,21 @@ def param_dir(pos, list):
print(f" doml_directory = {doml_directory} model_filename = {model_filename}")
skip_next = True
def param_help(pos, list):
print(f"\nUsage: {sys.argv[0]} [-h] [-d <doml_directory>] [--single] <model_filename>\n")
sys.exit()
# indicate to load the single-file metamodel (doml.ecore) instead of the split one
def param_single(pos, list):
global load_split_model
print(f"--> param_single({pos},{list}")
load_split_model = False
options = {'-d': param_dir, '-h': param_help, '--single_mmodel': param_single, '--single': param_single, '--output': output_file_name}
options = {'-d': param_dir, '-h': param_help, '--single_mmodel': param_single, '--single': param_single,
'--output': output_file_name}
argc = len(sys.argv)
paramlist = sys.argv[1:]
for i, param in enumerate(paramlist):
@@ -58,9 +59,7 @@ for i, param in enumerate(paramlist):
if __name__ == '__main__':
ModelPrinter.print_model(model_path=model_filename, is_multiecore_metamodel=load_split_model,
metamodel_directory=doml_directory)
# ModelParser.parse_model(model_path=model_filename, is_multiecore_metamodel=load_split_model,
# metamodel_directory=doml_directory)
# with open("input_file_generated/ir.json") as json_file:
# data = json.load(json_file)
# template_generated_folder = create_infrastructure_files(data)
# compress_file_folder = compress_file(template_generated_folder, output_file_name)
compress_folder_info = Orchestrator.create_iac_from_doml_path(model_path=model_filename,
is_multiecore_metamodel=load_split_model,
metamodel_directory=doml_directory)
@@ -35,72 +35,33 @@ data "openstack_networking_secgroup_v2" "default" {
name = "default"
tenant_id = data.openstack_identity_project_v3.test_tenant.id
}
resource "openstack_compute_secgroup_v2" "out_all" {
name = "out_all"
description = "Security group rule for port -"
rule {
from_port =
to_port =
ip_protocol = "-1"
cidr = [
0.0.0.0/0,
::/0,
]
# Create virtual machine
resource "openstack_compute_instance_v2" "vm1" {
name = "nginx-host"
image_name = "i1"
flavor_name = "small"
key_pair = openstack_compute_keypair_v2.ssh_key.name
network {
port = openstack_networking_port_v2.net1.id
}
}
resource "openstack_compute_secgroup_v2" "http" {
name = "http"
description = "Security group rule for port -"
rule {
from_port = 80
to_port = 80
ip_protocol = "tcp"
cidr = [
0.0.0.0/0,
::/0,
]
}
# Create ssh keys
resource "openstack_compute_keypair_v2" "ssh_key" {
name = "ubuntu"
public_key = "/home/user1/.ssh/openstack.key"
}
resource "openstack_compute_secgroup_v2" "https" {
name = "https"
description = "Security group rule for port -"
rule {
from_port = 443
to_port = 443
ip_protocol = "tcp"
cidr = [
0.0.0.0/0,
::/0,
]
}
# Create floating ip
resource "openstack_networking_floatingip_v2" "vm1_floating_ip" {
pool = "external"
# fixed_ip = ""
}
resource "openstack_compute_secgroup_v2" "ssh" {
name = "ssh"
description = "Security group rule for port -"
rule {
from_port = 22
to_port = 22
ip_protocol = "tcp"
cidr = [
0.0.0.0/0,
::/0,
]
}
# Attach floating ip to instance
resource "openstack_compute_floatingip_associate_v2" "vm1_floating_ip_association" {
floating_ip = openstack_networking_floatingip_v2.vm1_floating_ip.address
instance_id = openstack_compute_instance_v2.vm1.id
}
## Network
@@ -142,32 +103,3 @@ resource "openstack_networking_router_interface_v2" "net1_router_interface" {
subnet_id = openstack_networking_subnet_v2.net1_subnet.id
}
# Create virtual machine
resource "openstack_compute_instance_v2" "vm1" {
name = "nginx-host"
image_name = "i1"
flavor_name = "small"
key_pair = openstack_compute_keypair_v2.ssh_key.name
network {
port = openstack_networking_port_v2.net1.id
}
}
# Create ssh keys
resource "openstack_compute_keypair_v2" "ssh_key" {
name = "ubuntu"
public_key = "/home/user1/.ssh/openstack.key"
}
# Create floating ip
resource "openstack_networking_floatingip_v2" "vm1_floating_ip" {
pool = "external"
# fixed_ip = ""
}
# Attach floating ip to instance
resource "openstack_compute_floatingip_associate_v2" "vm1_floating_ip_association" {
floating_ip = openstack_networking_floatingip_v2.vm1_floating_ip.address
instance_id = openstack_compute_instance_v2.vm1.id
}