Browse Source
Puts into place the DeploymentConfiguration yaml that provides the options that should be configured by the site design to the deployment (and update) workflows. This change additionally refactors reused parts into common modules as related to info passing (xcom). Change-Id: Ib6470899b204dbc18d2a9a2e4f95540b3b0032b0 (changes/87/569187/1)
39 changed files with 1220 additions and 492 deletions
@ -0,0 +1,175 @@
|
||||
# Copyright 2018 AT&T Intellectual Property. All other rights reserved. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
from airflow.operators import ConcurrencyCheckOperator |
||||
from airflow.operators.python_operator import PythonOperator |
||||
from airflow.operators.subdag_operator import SubDagOperator |
||||
|
||||
from armada_deploy_site import deploy_site_armada |
||||
import dag_names as dn |
||||
from deckhand_get_design import get_design_deckhand |
||||
from destroy_node import destroy_server |
||||
from drydock_deploy_site import deploy_site_drydock |
||||
from failure_handlers import step_failure_handler |
||||
from dag_deployment_configuration import get_deployment_configuration |
||||
from preflight_checks import all_preflight_checks |
||||
from validate_site_design import validate_site_design |
||||
|
||||
|
||||
class CommonStepFactory(object):
    """Factory for workflow steps shared among multiple dags.

    Holds the parent dag name, dag, and default args so every generated
    step is wired consistently with the shared failure handler.
    """

    def __init__(self, parent_dag_name, dag, default_args):
        """Create the factory.

        :param parent_dag_name: name used to qualify generated subdags
        :param dag: the dag the generated steps attach to
        :param default_args: default arguments handed to each subdag
        """
        self.parent_dag_name = parent_dag_name
        self.dag = dag
        self.default_args = default_args

    def _subdag_step(self, subdag_factory, task_id):
        """Wrap a subdag-producing function in a SubDagOperator.

        Applies the shared failure handler and attaches the step to
        this factory's dag.
        """
        return SubDagOperator(
            subdag=subdag_factory(self.parent_dag_name,
                                  task_id,
                                  args=self.default_args),
            task_id=task_id,
            on_failure_callback=step_failure_handler,
            dag=self.dag)

    def get_action_xcom(self, task_id=dn.ACTION_XCOM):
        """Generate the action_xcom step.

        Step responsible for getting the action information passed
        by the invocation of the dag, which includes any options.
        """
        def xcom_push(**kwargs):
            """Push the 'action' from the dag_run conf onto XCOM.

            Makes the action (including its options) available to the
            downstream Operators.
            """
            kwargs['ti'].xcom_push(key='action',
                                   value=kwargs['dag_run'].conf['action'])

        return PythonOperator(task_id=task_id,
                              dag=self.dag,
                              python_callable=xcom_push)

    def get_concurrency_check(self, task_id=dn.DAG_CONCURRENCY_CHECK_DAG_NAME):
        """Generate the concurrency check step.

        Prevents simultaneous execution of dags that should not
        execute together.
        """
        return ConcurrencyCheckOperator(
            task_id=task_id,
            on_failure_callback=step_failure_handler,
            dag=self.dag)

    def get_preflight(self, task_id=dn.ALL_PREFLIGHT_CHECKS_DAG_NAME):
        """Generate the preflight step.

        Preflight checks preconditions for running a DAG.
        """
        return self._subdag_step(all_preflight_checks, task_id)

    def get_get_design_version(self, task_id=dn.DECKHAND_GET_DESIGN_VERSION):
        """Generate the get design version step.

        Retrieves the version of the design to use from deckhand.
        """
        return self._subdag_step(get_design_deckhand, task_id)

    def get_validate_site_design(self,
                                 task_id=dn.VALIDATE_SITE_DESIGN_DAG_NAME):
        """Generate the validate site design step.

        Checks that the design to be used for a deployment passes
        validation before using it.
        """
        return self._subdag_step(validate_site_design, task_id)

    def get_deployment_configuration(self,
                                     task_id=dn.GET_DEPLOY_CONF_DAG_NAME):
        """Generate the step to retrieve the deployment configuration.

        Provides the timings and strategies that will be used in
        subsequent steps.
        """
        # resolves to the module-level import, not this method
        return self._subdag_step(get_deployment_configuration, task_id)

    def get_drydock_build(self, task_id=dn.DRYDOCK_BUILD_DAG_NAME):
        """Generate the drydock build step.

        Drydock build does the hardware provisioning.
        """
        return self._subdag_step(deploy_site_drydock, task_id)

    def get_armada_build(self, task_id=dn.ARMADA_BUILD_DAG_NAME):
        """Generate the armada build step.

        Armada build does the deployment of helm charts.
        """
        return self._subdag_step(deploy_site_armada, task_id)

    def get_destroy_server(self, task_id=dn.DESTROY_SERVER_DAG_NAME):
        """Generate a destroy server step.

        Destroy server tears down kubernetes and hardware.
        """
        return self._subdag_step(destroy_server, task_id)
@ -0,0 +1,18 @@
|
||||
# Copyright 2018 AT&T Intellectual Property. All other rights reserved. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
# Location of shipyard.conf
# Note that the shipyard.conf file needs to be placed on a volume
# that can be accessed by the containers
config_path = '/usr/local/airflow/plugins/shipyard.conf'
@ -0,0 +1,36 @@
|
||||
# Copyright 2018 AT&T Intellectual Property. All other rights reserved. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
from airflow.models import DAG |
||||
from airflow.operators import DeploymentConfigurationOperator |
||||
|
||||
from config_path import config_path |
||||
|
||||
|
||||
GET_DEPLOYMENT_CONFIGURATION_NAME = 'get_deployment_configuration' |
||||
|
||||
|
||||
def get_deployment_configuration(parent_dag_name, child_dag_name, args):
    """DAG to retrieve deployment configuration.

    Builds a subdag named '<parent>.<child>' containing a single
    DeploymentConfigurationOperator task.
    """
    dag = DAG('{}.{}'.format(parent_dag_name, child_dag_name),
              default_args=args)

    # The operator registers itself on the dag via dag=dag; the return
    # value of the constructor is not needed here.
    DeploymentConfigurationOperator(
        task_id=GET_DEPLOYMENT_CONFIGURATION_NAME,
        shipyard_conf=config_path,
        main_dag_name=parent_dag_name,
        dag=dag)

    return dag
@ -0,0 +1,26 @@
|
||||
# Copyright 2018 AT&T Intellectual Property. All other rights reserved. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
# Subdags
# Names used both as the task_id of the wrapping SubDagOperator step
# and as the child portion of the subdag id ('<parent>.<name>').
ALL_PREFLIGHT_CHECKS_DAG_NAME = 'preflight'
ARMADA_BUILD_DAG_NAME = 'armada_build'
DAG_CONCURRENCY_CHECK_DAG_NAME = 'dag_concurrency_check'
DECKHAND_GET_DESIGN_VERSION = 'deckhand_get_design_version'
GET_DEPLOY_CONF_DAG_NAME = 'dag_deployment_configuration'
DRYDOCK_BUILD_DAG_NAME = 'drydock_build'
VALIDATE_SITE_DESIGN_DAG_NAME = 'validate_site_design'
DESTROY_SERVER_DAG_NAME = 'destroy_server'

# Steps
# Task id of the step that pushes the invoking action onto XCOM.
ACTION_XCOM = 'action_xcom'
@ -0,0 +1,66 @@
|
||||
# Copyright 2018 AT&T Intellectual Property. All other rights reserved. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
import configparser |
||||
import logging |
||||
|
||||
from keystoneauth1.identity import v3 as keystone_v3 |
||||
from keystoneauth1 import session as keystone_session |
||||
|
||||
from deckhand.client import client as deckhand_client |
||||
|
||||
LOG = logging.getLogger(__name__) |
||||
|
||||
|
||||
class DeckhandClientFactory(object):
    """Factory for DeckhandClient to encapsulate commonly reused setup"""

    def __init__(self,
                 shipyard_conf,
                 *args, **kwargs):
        """Deckhand Client Factory

        Creates a client factory to retrieve clients
        :param shipyard_conf: Location of shipyard.conf
        """
        self.config = configparser.ConfigParser()
        self.config.read(shipyard_conf)

    def get_client(self):
        """Retrieve a deckhand client

        Builds a keystone session from the [keystone_authtoken] section
        of the configuration and returns a Deckhand client using it.

        Notes:
        TODO(bryan-strassner): If/when the airflow plugin modules move to
            using oslo config, consider using the example here:
            https://github.com/att-comdev/deckhand/blob/cef3b52a104e620e88a24caf70ed2bb1297c268f/deckhand/barbican/client_wrapper.py#L53
            which will load the attributes from the config more flexibly.
            Keystoneauth1 also provides for a simpler solution with:
            https://docs.openstack.org/keystoneauth/latest/api/keystoneauth1.loading.html
            if oslo config is used.
        """
        # Gather the session arguments from the shipyard configuration
        auth_attrs = ('auth_url', 'password', 'project_domain_name',
                      'project_name', 'username', 'user_domain_name')
        keystone_auth = {
            attr: self.config.get('keystone_authtoken', attr)
            for attr in auth_attrs
        }

        # Set up keystone session
        sess = keystone_session.Session(
            auth=keystone_v3.Password(**keystone_auth))

        LOG.info("Setting up Deckhand client with parameters")
        for attr, value in keystone_auth.items():
            # never log the password
            if attr != 'password':
                LOG.debug('%s = %s', attr, value)
        return deckhand_client.Client(session=sess, endpoint_type='internal')
@ -0,0 +1,178 @@
|
||||
# Copyright 2017 AT&T Intellectual Property. All other rights reserved. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""Deployment Configuration |
||||
|
||||
Retrieves the deployment configuration from Deckhand and places the values |
||||
retrieved into a dictionary |
||||
""" |
||||
import logging |
||||
|
||||
from airflow.exceptions import AirflowException |
||||
from airflow.models import BaseOperator |
||||
from airflow.plugins_manager import AirflowPlugin |
||||
from airflow.utils.decorators import apply_defaults |
||||
|
||||
try: |
||||
from deckhand_client_factory import DeckhandClientFactory |
||||
except ImportError: |
||||
from shipyard_airflow.plugins.deckhand_client_factory import ( |
||||
DeckhandClientFactory |
||||
) |
||||
|
||||
LOG = logging.getLogger(__name__) |
||||
|
||||
|
||||
class DeploymentConfigurationOperator(BaseOperator): |
||||
"""Deployment Configuration Operator |
||||
|
||||
Retrieve the deployment configuration from Deckhand for use throughout |
||||
the workflow. Put the configuration into a dictionary. |
||||
|
||||
Failures are raised: |
||||
- when Deckhand cannot be contacted |
||||
- when the DeploymentConfiguration (deployment-configuration) document |
||||
cannot be retrieved |
||||
""" |
||||
config_keys_defaults = { |
||||
"physical_provisioner.deployment_strategy": "all-at-once", |
||||
"physical_provisioner.deploy_interval": 30, |
||||
"physical_provisioner.deploy_timeout": 3600, |
||||
"physical_provisioner.destroy_interval": 30, |
||||
"physical_provisioner.destroy_timeout": 900, |
||||
"physical_provisioner.join_wait": 120, |
||||
"physical_provisioner.prepare_node_interval": 30, |
||||
"physical_provisioner.prepare_node_timeout": 1000, |
||||
"physical_provisioner.prepare_site_interval": 10, |
||||
"physical_provisioner.prepare_site_timeout": 300, |
||||
"physical_provisioner.verify_interval": 10, |
||||
"physical_provisioner.verify_timeout": 60, |
||||
"kubernetes.node_status_interval": 30, |
||||
"kubernetes.node_status_timeout": 1800, |
||||
"kubernetes_provisioner.drain_timeout": 3600, |
||||
"kubernetes_provisioner.drain_grace_period": 1800, |
||||
"kubernetes_provisioner.clear_labels_timeout": 1800, |
||||
"kubernetes_provisioner.remove_etcd_timeout": 1800, |
||||
"kubernetes_provisioner.etcd_ready_timeout": 600, |
||||
"armada.manifest": "full-site" |
||||
} |
||||
|
||||
@apply_defaults |
||||
def __init__(self, |
||||
main_dag_name=None, |
||||
shipyard_conf=None, |
||||
*args, **kwargs): |
||||
"""Deployment Configuration Operator |
||||
|
||||
Generate a DeploymentConfigurationOperator to read the deployment's |
||||
configuration for use by other operators |
||||
|
||||
:param main_dag_name: Parent Dag |
||||
:param shipyard_conf: Location of shipyard.conf |
||||
""" |
||||
|
||||
super(DeploymentConfigurationOperator, self).__init__(*args, **kwargs) |
||||
self.main_dag_name = main_dag_name |
||||
self.shipyard_conf = shipyard_conf |
||||
|
||||
def execute(self, context): |
||||
"""Perform Deployment Configuration extraction""" |
||||
|
||||
revision_id = self.get_revision_id(context.get('task_instance')) |
||||
doc = self.get_doc(revision_id) |
||||
converted = self.map_config_keys(doc) |
||||
# return the mapped configuration so that it can be placed on xcom |
||||
return converted |
||||
|
||||
def get_revision_id(self, task_instance): |
||||
"""Get the revision id from xcom""" |
||||
if task_instance: |
||||
LOG.debug("task_instance found, extracting design version") |
||||
# Set the revision_id to the revision on the xcom |
||||
revision_id = task_instance.xcom_pull( |
||||
task_ids='deckhand_get_design_version', |
||||
dag_id=self.main_dag_name + '.deckhand_get_design_version') |
||||
if revision_id: |
||||
LOG.info("Revision is set to: %s for deployment configuration", |
||||
revision_id) |
||||
return revision_id |
||||
# either revision id was not on xcom, or the task_instance is messed |
||||
raise AirflowException( |
||||
"Design_revision is not set. Cannot proceed with retrieval of" |
||||
" the design configuration" |
||||
) |