Update OpenStack Operator

The existing OpenStack operator only works with Python 2.7 Airflow
containers and fails on Airflow containers running Python 3.x.

This update fixes how the OpenStack environment variables are set
up: instead of sourcing openrc.sh in a shell and parsing the output
of "env" (which comes back as bytes under Python 3 and breaks the
parsing), the operator now reads the credentials from shipyard.conf
and exports them through os.environ.
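
For illustration only (not part of this change set), a minimal
sketch of the failure mode, assuming default Popen settings:

    import subprocess

    # The old operator sourced openrc.sh in a shell and parsed the
    # output of "env".  With Popen defaults, communicate() returns
    # str on Python 2 but bytes on Python 3, so splitting with the
    # str separator "=" below raises TypeError under Python 3.
    pipe = subprocess.Popen("env", stdout=subprocess.PIPE, shell=True)
    data = pipe.communicate()[0]
    env_vars = dict(line.split("=", 1) for line in data.splitlines())

Decoding the output (or passing universal_newlines=True) would also
work; this change drops the shell step entirely and reads
shipyard.conf instead.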

The current assumption is that shipyard.conf contains the
information required to reach the OpenStack environment, and that
the Airflow DAG consumes it to execute the OpenStack commands.
This will likely change once DeckHand is ready for integration,
as the required information will then be retrieved from DeckHand
instead.
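
As an illustration, a hypothetical [keystone] section of
shipyard.conf with the keys the operator reads; the values below
are placeholders, not real endpoints or credentials:

    [keystone]
    OS_AUTH_URL = http://keystone.example.com:5000/v3
    OS_PROJECT_ID = 12345678901234567890123456789012
    OS_PROJECT_NAME = service
    OS_USER_DOMAIN_NAME = default
    OS_USERNAME = shipyard
    OS_PASSWORD = password
    OS_REGION_NAME = RegionOne
    OS_IDENTITY_API_VERSION = 3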

The xcom portion has been removed as it does not serve any purpose
at the moment.  It may be added back in the future if the need
arises.
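
If xcom_push is reinstated later, a downstream task could consume
the pushed value (the last line of CLI output) roughly as sketched
below; the check_endpoints callable and its task id are made up for
illustration:

    import logging

    from airflow.operators.python_operator import PythonOperator

    def check_endpoints(**kwargs):
        # Pull whatever the OpenStackOperator task pushed via XCom
        output = kwargs['ti'].xcom_pull(task_ids='endpoint_list_task')
        logging.info("endpoint list output: %s", output)

    check = PythonOperator(
        task_id='check_endpoints_task',
        python_callable=check_endpoints,
        provide_context=True,
        dag=dag)  # the openstack_cli DAG object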

Change-Id: I0c5fb56462fdbc43a20897373118f510cbb2e01c
Author: Anthony Lin
Date:   2017-08-18 00:35:09 +00:00
Commit: b66d39e468 (parent 8526e18058)

2 changed files with 29 additions and 39 deletions

File 1 of 2: the openstack_cli DAG definition

@@ -17,13 +17,13 @@
 import airflow
 from airflow import DAG
 from airflow.operators import OpenStackOperator
-from airflow.operators.bash_operator import BashOperator
 from datetime import timedelta
 
 default_args = {
     'owner': 'airflow',
     'depends_on_past': False,
-    'start_date': airflow.utils.dates.days_ago(2),
+    'start_date': airflow.utils.dates.days_ago(1),
     'email': ['airflow@example.com'],
     'email_on_failure': False,
     'email_on_retry': False,
@@ -33,41 +33,33 @@ default_args = {
 dag = DAG('openstack_cli', default_args=default_args, schedule_interval=None)
 
-# print_date
-t1 = BashOperator(task_id='print_date', bash_command='date', dag=dag)
+# Location of shipyard.conf
+config_path = '/usr/local/airflow/plugins/shipyard.conf'
 
-# Note that the openrc.sh file needs to be placed on a volume that can be
-# accessed by the containers
+# Note that the shipyard.conf file needs to be placed on a volume
+# that can be accessed by the containers
 
 # openstack endpoint list
-t2 = OpenStackOperator(
+t1 = OpenStackOperator(
     task_id='endpoint_list_task',
-    openrc_file='/usr/local/airflow/dags/openrc.sh',
+    shipyard_conf=config_path,
     openstack_command=['openstack', 'endpoint', 'list'],
     dag=dag)
 
 # openstack service list
-t3 = OpenStackOperator(
+t2 = OpenStackOperator(
     task_id='service_list_task',
-    openrc_file='/usr/local/airflow/dags/openrc.sh',
+    shipyard_conf=config_path,
     openstack_command=['openstack', 'service', 'list'],
     dag=dag)
 
 # openstack server list
-t4 = OpenStackOperator(
+t3 = OpenStackOperator(
     task_id='server_list_task',
-    openrc_file='/usr/local/airflow/dags/openrc.sh',
+    shipyard_conf=config_path,
     openstack_command=['openstack', 'server', 'list'],
     dag=dag)
 
-# openstack network list
-t5 = OpenStackOperator(
-    task_id='network_list_task',
-    openrc_file='/usr/local/airflow/dags/openrc.sh',
-    openstack_command=['openstack', 'network', 'list'],
-    dag=dag)
-
-t2.set_upstream(t1)
-t3.set_upstream(t1)
-t4.set_upstream(t1)
-t5.set_upstream(t1)
+t2.set_upstream(t1)
+t3.set_upstream(t2)
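
Since the DAG keeps schedule_interval=None, it only runs when
triggered manually, e.g. "airflow trigger_dag openstack_cli" with
the Airflow 1.x CLI.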

File 2 of 2: the OpenStackOperator plugin

@@ -14,6 +14,8 @@
 import logging
 import subprocess
+import os
+import configparser
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
@@ -24,36 +26,37 @@ from airflow.utils.decorators import apply_defaults
 class OpenStackOperator(BaseOperator):
     """
     Performs OpenStack CLI calls
-    :openrc_file: Path of the openrc file
+    :shipyard_conf: Location of shipyard.conf
     :openstack_command: The OpenStack command to be executed
     """
 
     @apply_defaults
     def __init__(self,
-                 openrc_file,
+                 shipyard_conf,
                  openstack_command=None,
                  xcom_push=False,
-                 *args,
-                 **kwargs):
+                 *args, **kwargs):
 
         super(OpenStackOperator, self).__init__(*args, **kwargs)
-        self.openrc_file = openrc_file
+        self.shipyard_conf = shipyard_conf
         self.openstack_command = openstack_command
         self.xcom_push_flag = xcom_push
 
     def execute(self, context):
         logging.info("Running OpenStack Command: %s", self.openstack_command)
 
-        # Emulate "source" in bash. Sets up environment variables.
-        pipe = subprocess.Popen(
-            ". %s; env" % self.openrc_file, stdout=subprocess.PIPE, shell=True)
-        data = pipe.communicate()[0]
-        os_env = dict((line.split("=", 1) for line in data.splitlines()))
+        # Read and parse shipyard.conf
+        config = configparser.ConfigParser()
+        config.read(self.shipyard_conf)
+
+        # Construct environment variables
+        for attr in ('OS_AUTH_URL', 'OS_PROJECT_ID', 'OS_PROJECT_NAME',
+                     'OS_USER_DOMAIN_NAME', 'OS_USERNAME', 'OS_PASSWORD',
+                     'OS_REGION_NAME', 'OS_IDENTITY_API_VERSION'):
+            os.environ[attr] = config.get('keystone', attr)
 
         # Execute the OpenStack CLI Command
         openstack_cli = subprocess.Popen(
             self.openstack_command,
-            env=os_env,
             stdout=subprocess.PIPE,
             stderr=subprocess.STDOUT)
@@ -65,7 +68,7 @@ class OpenStackOperator(BaseOperator):
             line = line.strip()
             logging.info(line)
 
-        # Wait for child process to terminate.
+        # Wait for child process to terminate
         # Set and return returncode attribute.
         openstack_cli.wait()
         logging.info("Command exited with "
@@ -74,11 +77,6 @@ class OpenStackOperator(BaseOperator):
         # Raise Exceptions if OpenStack Command Fails
         if openstack_cli.returncode:
             raise AirflowException("OpenStack Command Failed")
 
-        """
-        Push response to an XCom if xcom_push is True
-        """
-        if self.xcom_push_flag:
-            return line
 
 class OpenStackCliPlugin(AirflowPlugin):