Airflow stable 2.6.2
This PS updates python modules and code to match Airflow 2.6.2, and deploys the new Airflow:

- bionic py36 gates were removed
- python code was corrected to match the new module versions
- python module versions were selected based on the airflow-2.6.2 constraints
- the airskiff deploy pipeline was aligned with the latest in treasuremap v1.9
- the shipyard chart was corrected to match the new airflow CLI, configuration items, and their default values
- new celery configuration items and their values were added
- the airflow runtime logging config was updated
- deprecation and future python warnings were disabled in airflow images
- celery was added to the list of airflow providers
- airflow runtime scripts were adjusted to match the new CLI
- shipyard SQL queries to the airflow DB were adjusted to match the new SQL schema of the DB
- shipyard_airflow and shipyard_client unit tests were updated to match the new DB structure and the new CLI
- the airflow db sync job now uses the "db upgrade" command
- helm version uplifted to v3.12.2

Change-Id: Ife88e53ce0dd8dc77bf267de1f5e6b8361ca76fd
parent 3fcc6e0d65
commit 81066ae98f
.gitignore (vendored, 3 changes)

@@ -49,6 +49,7 @@ nosetests.xml
 coverage.xml
 *.cover
 .hypothesis/
+cover/

 # Translations
 *.mo
@@ -126,4 +127,4 @@ airflow.cfg
 airflow.db
 latest
 src/bin/shipyard_airflow/shipyard_airflow/config
 src/bin/shipyard_airflow/shipyard_airflow/webserver_config.py
.zuul.yaml (71 changes)

@@ -14,36 +14,30 @@
     templates:
       - docs-on-readthedocs
       - openstack-python38-jobs
-      - openstack-python36-jobs
     vars:
       rtd_webhook_id: '38576'
       rtd_project_name: 'airship-shipyard'
       ensure_global_symlinks: true
     check:
       jobs:
-        - openstack-tox-pep8
+        - airship-shipyard-openstack-tox-pep8-focal
         - airship-shipyard-openstack-tox-cover-focal
         - airship-shipyard-chart-build-gate
         - airship-shipyard-chart-build-latest-htk
         - airship-shipyard-whitespace-lint-gate
-        - airship-shipyard-airskiff-deployment-bionic
         - airship-shipyard-airskiff-deployment-focal
         - airship-shipyard-image-gate-ubuntu_focal
-        - airship-shipyard-image-gate-ubuntu_bionic
     gate:
       jobs:
-        - openstack-tox-pep8
+        - airship-shipyard-openstack-tox-pep8-focal
         - airship-shipyard-openstack-tox-cover-focal
         - airship-shipyard-chart-build-gate
         - airship-shipyard-whitespace-lint-gate
-        - airship-shipyard-airskiff-deployment-bionic
         - airship-shipyard-airskiff-deployment-focal
         - airship-shipyard-image-gate-ubuntu_focal
-        - airship-shipyard-image-gate-ubuntu_bionic
     post:
       jobs:
         - airship-shipyard-docker-build-publish-ubuntu_focal
-        - airship-shipyard-docker-build-publish-ubuntu_bionic
         - shipyard-upload-git-mirror

@@ -58,6 +52,12 @@
       - name: primary
         label: ubuntu-focal

+- job:
+    name: airship-shipyard-openstack-tox-pep8-focal
+    parent: openstack-tox-pep8
+    description: Runs pep8 job on focal
+    nodeset: airship-shipyard-single-node-focal
+
 - job:
     name: airship-shipyard-openstack-tox-cover-focal

@@ -116,19 +116,6 @@
       dynamic:
         patch_set: true

-- job:
-    name: airship-shipyard-image-gate-ubuntu_bionic
-    description: |
-      Run shipyard-image build for ubuntu_bionic
-    parent: airship-shipyard-image-base
-    irrelevant-files: *irrelevant-files
-    vars:
-      publish: false
-      distro: ubuntu_bionic
-      tags:
-        dynamic:
-          patch_set: true
-
 - job:
     name: airship-shipyard-airskiff-deployment-focal
     nodeset: airship-shipyard-single-node-focal

@@ -145,34 +132,14 @@
       - airship/treasuremap
     vars:
       CLONE_SHIPYARD: false
-      HTK_COMMIT: master
+      HTK_COMMIT: 97ce6d7d8e9a090c748800d69a57bbd9af698b60
       SHIPYARD_IMAGE_DISTRO: ubuntu_focal
     irrelevant-files: *irrelevant-files

-- job:
-    name: airship-shipyard-airskiff-deployment-bionic
-    nodeset: airship-shipyard-single-node-focal
-    description: |
-      Deploy Memcached using Airskiff and submitted Armada changes.
-    timeout: 9600
-    voting: true
-    pre-run:
-      - tools/gate/playbooks/airskiff-reduce-site.yaml
-      - tools/gate/playbooks/git-config.yaml
-    run: tools/gate/playbooks/airskiff-deploy.yaml
-    post-run: tools/gate/playbooks/debug-report.yaml
-    required-projects:
-      - airship/treasuremap
-    vars:
-      CLONE_SHIPYARD: false
-      HTK_COMMIT: master
-      SHIPYARD_IMAGE_DISTRO: ubuntu_bionic
-    irrelevant-files: *irrelevant-files
-
 - job:
     name: airship-shipyard-docker-build-publish-ubuntu_focal
-    timeout: 1800
+    timeout: 3600
     run: tools/gate/playbooks/run-image.yaml
     nodeset: airship-shipyard-single-node-focal
     irrelevant-files: *irrelevant-files

@@ -187,24 +154,8 @@
         commit: true
         static:
           - latest
+          - airflow_2.6.2

-- job:
-    name: airship-shipyard-docker-build-publish-ubuntu_bionic
-    timeout: 1800
-    run: tools/gate/playbooks/run-image.yaml
-    nodeset: airship-shipyard-single-node-focal
-    irrelevant-files: *irrelevant-files
-    secrets:
-      - airship_shipyard_quay_creds
-    vars:
-      publish: true
-      distro: ubuntu_bionic
-      tags:
-        dynamic:
-          branch: true
-          commit: true
-        static:
-          - latest
-
 - secret:
Chart.yaml

@@ -16,7 +16,7 @@
 apiVersion: v1
 description: A Helm chart for Shipyard and Airflow
 name: shipyard
-version: 0.1.2
+version: 0.2.0
 keywords:
   - shipyard
   - airflow
@@ -19,4 +19,4 @@ limitations under the License.

 set -ex

-airflow initdb
+airflow db upgrade
@@ -89,7 +89,7 @@ spec:
               subPath: shipyard.conf
               readOnly: true
             - name: airflow-logs
-              mountPath: {{ .Values.conf.airflow.core.base_log_folder }}
+              mountPath: {{ .Values.conf.airflow.logging.base_log_folder }}
 {{ if $mounts_airflow_scheduler.volumeMounts }}{{ toYaml $mounts_airflow_scheduler.volumeMounts | indent 12 }}{{ end }}
       volumes:
         - name: airflow-etc
@@ -142,7 +142,7 @@ spec:
               subPath: shipyard.conf
               readOnly: true
             - name: airflow-logs
-              mountPath: {{ .Values.conf.airflow.core.base_log_folder }}
+              mountPath: {{ .Values.conf.airflow.logging.base_log_folder }}
 {{ if $mounts_shipyard.volumeMounts }}{{ toYaml $mounts_shipyard.volumeMounts | indent 12 }}{{ end }}
       volumes:
 {{ if .Values.conf.shipyard.base.profiler }}
@@ -120,10 +120,10 @@ spec:
             - chown
             - -R
             - "airflow:airflow"
-            - {{ .Values.conf.airflow.core.base_log_folder }}
+            - {{ .Values.conf.airflow.logging.base_log_folder }}
           volumeMounts:
             - name: airflow-logs
-              mountPath: {{ .Values.conf.airflow.core.base_log_folder }}
+              mountPath: {{ .Values.conf.airflow.logging.base_log_folder }}
       containers:
         - name: airflow-scheduler
           image: {{ .Values.images.tags.airflow }}
@@ -152,7 +152,7 @@ spec:
               subPath: shipyard.conf
               readOnly: true
             - name: airflow-logs
-              mountPath: {{ .Values.conf.airflow.core.base_log_folder }}
+              mountPath: {{ .Values.conf.airflow.logging.base_log_folder }}
         - name: airflow-worker
           image: {{ .Values.images.tags.airflow }}
           imagePullPolicy: {{ .Values.images.pull_policy }}
@@ -184,7 +184,7 @@ spec:
               subPath: shipyard.conf
               readOnly: true
             - name: airflow-logs
-              mountPath: {{ .Values.conf.airflow.core.base_log_folder }}
+              mountPath: {{ .Values.conf.airflow.logging.base_log_folder }}
         - name: airflow-logrotate
           image: {{ .Values.images.tags.airflow }}
           imagePullPolicy: {{ .Values.images.pull_policy }}
@@ -197,12 +197,12 @@ spec:
             - name: DAYS_BEFORE_LOG_DELETION
               value: {{ .Values.logrotate.days_before_deletion | quote }}
             - name: LOGROTATE_PATH
-              value: {{ .Values.conf.airflow.core.base_log_folder }}
+              value: {{ .Values.conf.airflow.logging.base_log_folder }}
             - name: LOGROTATE_ENABLED
               value: "enabled"
           volumeMounts:
             - name: airflow-logs
-              mountPath: {{ .Values.conf.airflow.core.base_log_folder }}
+              mountPath: {{ .Values.conf.airflow.logging.base_log_folder }}
       volumes:
         - name: pod-tmp
           emptyDir: {}
@@ -471,34 +471,13 @@ conf:
       # Envrionment variable AIRFLOW_HOME is used instead.
       #airflow_home: /usr/local/airflow
       dags_folder: /usr/local/airflow/dags
-      base_log_folder: /usr/local/airflow/logs
-      remote_logging: "False"
-      remote_log_conn_id: ""
-      remote_base_log_folder: ""
-      encrypt_s3_logs: "False"
-      logging_level: "INFO"
-      fab_logging_level: "WARN"
-      # See image-bundled log_config.py.
-      # Adds console logging of task/step logs.
-      logging_config_class: log_config.LOGGING_CONFIG
-      # NOTE: Airflow 1.10 introduces extra newline characters between log
-      #       records. Version 1.10.1 should resolve this issue
-      #       https://issues.apache.org/jira/browse/AIRFLOW-1917
-      #
-      # NOTE: The log format ends up repeated for each log record that we log
-      #       in our custom operators, once for the logging_mixin class of
-      #       Airflow itself, and once again for the message we want to log.
-      #       E.g.:
-      # 2018-09-21 19:38:48,950 INFO logging_mixin(95) write - 2018-09-21 19:38:48,950 INFO deployment_configuration_operator(135) get_doc - Deckhand Client acquired
-      #
-      # NOTE: Updated from default to match Shipyard logging as much as
-      #       possible without more aggressive techniques
-      #
-      log_format: "%%(asctime)s %%(levelname)-8s %%(filename)s:%%(lineno)3d:%%(funcName)s %%(module)s %%(message)s"
-      simple_log_format: "%%(asctime)s %%(levelname)s - %%(message)s"
-      log_filename_template: "{{ ti.dag_id }}/{{ ti.task_id }}/{{ execution_date.strftime('%%Y-%%m-%%dT%%H:%%M:%%S') }}/{{ try_number }}.log"
-      log_processor_filename_template: "{{ filename }}.log"
-      dag_processor_manager_log_location: /usr/local/airflow/logs/dag_processor_manager/dag_processor_manager.log
+      sensitive_var_conn_names:
+        - sql_alchemy_conn
+        - broker_url
+        - result_backend
+        - fernet_key
+      lazy_discover_providers: "False"
+      lazy_load_plugins: "False"
       hostname_callable: "socket:getfqdn"
       default_timezone: "utc"
       executor: "CeleryExecutor"
@@ -539,9 +518,40 @@ conf:
       endpoint_url: http://localhost/
     api:
       auth_backend: airflow.api.auth.backend.default
+      enable_experimental_api: "True"
     lineage:
       # Shipyard is not using this
       backend: ""
+    logging:
+      # See image-bundled log_config.py.
+      # Adds console logging of task/step logs.
+      # logging_config_class: log_config.LOGGING_CONFIG
+      logging_config_class: new_log_config.LOGGING_CONFIG
+      # NOTE: Airflow 1.10 introduces extra newline characters between log
+      #       records. Version 1.10.1 should resolve this issue
+      #       https://issues.apache.org/jira/browse/AIRFLOW-1917
+      #
+      # NOTE: The log format ends up repeated for each log record that we log
+      #       in our custom operators, once for the logging_mixin class of
+      #       Airflow itself, and once again for the message we want to log.
+      #       E.g.:
+      # 2018-09-21 19:38:48,950 INFO logging_mixin(95) write - 2018-09-21 19:38:48,950 INFO deployment_configuration_operator(135) get_doc - Deckhand Client acquired
+      #
+      # NOTE: Updated from default to match Shipyard logging as much as
+      #       possible without more aggressive techniques
+      #
+      log_format: "%%(asctime)s %%(levelname)-8s %%(filename)s:%%(lineno)3d:%%(funcName)s %%(module)s %%(message)s"
+      log_filename_template: "{{ ti.dag_id }}/{{ ti.task_id }}/{{ execution_date.strftime('%%Y-%%m-%%dT%%H:%%M:%%S') }}/{{ try_number }}.log"
+      log_processor_filename_template: "{{ filename }}.log"
+      dag_processor_manager_log_location: /usr/local/airflow/logs/dag_processor_manager/dag_processor_manager.log
+      logging_level: "INFO"
+      fab_logging_level: "WARNING"
+      celery_logging_level: "INFO"
+      base_log_folder: /usr/local/airflow/logs
+      remote_logging: "False"
+      remote_log_conn_id: ""
+      remote_base_log_folder: ""
+      encrypt_s3_logs: "False"
     atlas:
       # Shipyard is not using this
       sasl_enabled: "False"
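Background for the two hunks above: Airflow 2 moved the logging options out of the [core] section of airflow.cfg into a dedicated [logging] section, which is why the chart values move wholesale rather than change meaning. Assuming the chart's usual values-to-ini rendering, the new block would land in the generated airflow.cfg roughly as:

    [logging]
    logging_config_class = new_log_config.LOGGING_CONFIG
    base_log_folder = /usr/local/airflow/logs
    logging_level = INFO
    fab_logging_level = WARNING
    celery_logging_level = INFO
    remote_logging = False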
@ -617,6 +627,11 @@ conf:
|
|||||||
flower_url_prefix: ""
|
flower_url_prefix: ""
|
||||||
flower_port: 5555
|
flower_port: 5555
|
||||||
flower_basic_auth: ""
|
flower_basic_auth: ""
|
||||||
|
operation_timeout: 30.0
|
||||||
|
broker_connection_retry_on_startup: "True"
|
||||||
|
broker_connection_retry: "True"
|
||||||
|
broker_connection_max_retries: 0
|
||||||
|
broker_connection_timeout: 30.0
|
||||||
default_queue: "default"
|
default_queue: "default"
|
||||||
# How many processes CeleryExecutor uses to sync task state.
|
# How many processes CeleryExecutor uses to sync task state.
|
||||||
# 0 means to use max(1, number of cores - 1) processes.
|
# 0 means to use max(1, number of cores - 1) processes.
|
||||||
@ -645,7 +660,6 @@ conf:
|
|||||||
# scheduler section in the docs for more information). This defines
|
# scheduler section in the docs for more information). This defines
|
||||||
# how often the scheduler should run (in seconds).
|
# how often the scheduler should run (in seconds).
|
||||||
scheduler_heartbeat_sec: 10
|
scheduler_heartbeat_sec: 10
|
||||||
run_duration: -1
|
|
||||||
# Check for pending dag runs no more than every 10 seconds
|
# Check for pending dag runs no more than every 10 seconds
|
||||||
min_file_process_interval: 10
|
min_file_process_interval: 10
|
||||||
dag_dir_list_interval: 300
|
dag_dir_list_interval: 300
|
||||||
@ -655,16 +669,19 @@ conf:
|
|||||||
scheduler_zombie_task_threshold: 300
|
scheduler_zombie_task_threshold: 300
|
||||||
catchup_by_default: "True"
|
catchup_by_default: "True"
|
||||||
max_tis_per_query: 512
|
max_tis_per_query: 512
|
||||||
statsd_on: "False"
|
|
||||||
statsd_host: "localhost"
|
|
||||||
statsd_port: 8125
|
|
||||||
statsd_prefix: "airflow"
|
|
||||||
# Shipyard's use of Airflow is low volume. 1 Thread is probably enough.
|
# Shipyard's use of Airflow is low volume. 1 Thread is probably enough.
|
||||||
max_threads: 1
|
# deprecated in 1.10.14. Replaced with "parsing_processes"
|
||||||
|
# max_threads: 1
|
||||||
|
parsing_processes: 1
|
||||||
authenticate: "False"
|
authenticate: "False"
|
||||||
# Turn off scheduler use of cron intervals by setting this to False.
|
# Turn off scheduler use of cron intervals by setting this to False.
|
||||||
# DAGs submitted manually in the web UI or with trigger_dag will still run.
|
# DAGs submitted manually in the web UI or with trigger_dag will still run.
|
||||||
use_job_schedule: "False"
|
use_job_schedule: "False"
|
||||||
|
metrics:
|
||||||
|
statsd_on: "False"
|
||||||
|
statsd_host: "localhost"
|
||||||
|
statsd_port: 8125
|
||||||
|
statsd_prefix: "airflow"
|
||||||
ldap:
|
ldap:
|
||||||
# Shipyard is not using this
|
# Shipyard is not using this
|
||||||
uri: ""
|
uri: ""
|
||||||
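Likewise, the statsd options moved from the scheduler section into the [metrics] section introduced in Airflow 2.0. Rendered into airflow.cfg (assumed mapping), the new block is approximately:

    [metrics]
    statsd_on = False
    statsd_host = localhost
    statsd_port = 8125
    statsd_prefix = airflow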
@@ -32,6 +32,7 @@ ENV WEB_PORT 8080
 ENV FLOWER_PORT 5555
 ENV WORKER_PORT 8793
 ENV SLUGIFY_USES_TEXT_UNIDECODE yes
+ENV PYTHONWARNINGS=ignore::DeprecationWarning,ignore::FutureWarning

 # Expose port for applications
 EXPOSE $WEB_PORT
@@ -43,12 +44,12 @@ ARG AIRFLOW_HOME=/usr/local/airflow
 # Moved celery to images/airflow/requirements.txt as apache-airflow uses a
 # version of celery incompatibile with the version of kombu needed by other
 # Airship components
-ARG AIRFLOW_SRC="apache-airflow[crypto,postgres,hive,hdfs,jdbc]==1.10.5"
+ARG AIRFLOW_SRC="apache-airflow[crypto,celery,hive,hdfs,jdbc,postgres]==2.6.2"
 ARG DEBIAN_FRONTEND=noninteractive
 ARG ctx_base=src/bin

 # Kubectl version
-ARG KUBECTL_VERSION=1.26.3
+ARG KUBECTL_VERSION=1.27.1

 # Needed from apache-airflow 1.10.2, since core.airflow_home config is deprecated
 ENV AIRFLOW_HOME=${AIRFLOW_HOME}
@@ -124,8 +125,8 @@ RUN useradd -ms /bin/bash -d ${AIRFLOW_HOME} airflow \
 # Install Airflow directly to allow overriding source
 # COPY images/airflow/requirements.txt /tmp/
 COPY ${ctx_base}/shipyard_airflow/requirements-frozen.txt /tmp/requirements.txt
-RUN pip3 install -r /tmp/requirements.txt --no-cache-dir
-#    && pip3 install $AIRFLOW_SRC --no-cache-dir
+RUN pip3 install -r /tmp/requirements.txt --no-cache-dir \
+    && pip3 install $AIRFLOW_SRC --no-cache-dir
 #    && (pip3 uninstall -y snakebite || true) \
 #    && (pip3 uninstall -y psycopg2 || true) \
 #    && (pip3 install --no-cache-dir --force-reinstall $(pip freeze | grep psycopg2-binary) || true)
@@ -135,6 +136,7 @@ COPY images/airflow/script/*.sh ${AIRFLOW_HOME}/

 # Copy configuration (e.g. logging config for Airflow):
 COPY images/airflow/config/*.py ${AIRFLOW_HOME}/config/
+COPY images/airflow/webserver_config.py ${AIRFLOW_HOME}/

 # Change permissions
 RUN chown -R airflow: ${AIRFLOW_HOME}
@@ -167,7 +169,11 @@ RUN cd /tmp/shipyard \
 #
 # Copy the plugins and dags that will be used by this Airflow image:
 COPY ${ctx_base}/shipyard_airflow/shipyard_airflow/plugins ${AIRFLOW_HOME}/plugins/
+COPY ${ctx_base}/shipyard_airflow/shipyard_airflow/plugins \
+    /usr/local/lib/python3.8/dist-packages/airflow/plugins/
 COPY ${ctx_base}/shipyard_airflow/shipyard_airflow/dags ${AIRFLOW_HOME}/dags/
+COPY ${ctx_base}/shipyard_airflow/shipyard_airflow/dags \
+    /usr/local/lib/python3.8/dist-packages/airflow/dags/

 # Set work directory
 USER airflow
images/airflow/config/new_log_config.py (new file, 4 lines)

@@ -0,0 +1,4 @@
+from copy import deepcopy
+from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG
+
+LOGGING_CONFIG = deepcopy(DEFAULT_LOGGING_CONFIG)
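The chart's logging_config_class points at this module, so Airflow loads new_log_config.LOGGING_CONFIG in place of its built-in default. As committed, the module only deep-copies the default dict; a hypothetical extension (not part of this change) could adjust the copy before Airflow consumes it, for example to add the console task logging the chart comments mention:

    from copy import deepcopy
    from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG

    LOGGING_CONFIG = deepcopy(DEFAULT_LOGGING_CONFIG)

    # Hypothetical tweak: also send task logs to stdout, reusing the
    # stock "airflow" formatter from the default config.
    LOGGING_CONFIG["handlers"]["console"] = {
        "class": "logging.StreamHandler",
        "formatter": "airflow",
        "stream": "ext://sys.stdout",
    }
    LOGGING_CONFIG["loggers"]["airflow.task"]["handlers"].append("console")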
@@ -19,15 +19,15 @@ python3_path=$(which python3)
 airflow_path=$(which airflow)

 # Initialize Airflow DB
-if [[ $cmd == 'initdb' ]]; then
-    ${python3_path} ${airflow_path} initdb
+if [[ $cmd == 'db init' ]]; then
+    ${python3_path} ${airflow_path} db init
 # Start the services based on argument from Airflow Helm Chart
 elif [[ $cmd == 'webserver' ]]; then
     ${python3_path} ${airflow_path} webserver
 elif [[ $cmd == 'flower' ]]; then
-    ${python3_path} ${airflow_path} flower
+    ${python3_path} ${airflow_path} celery flower --pid=/tmp/airflow-flower.pid
 elif [[ $cmd == 'worker' ]]; then
-    ${python3_path} ${airflow_path} worker
+    ${python3_path} ${airflow_path} celery worker --pid=/tmp/airflow-worker.pid
 # If command contains the word 'scheduler'
 elif [[ $cmd == *scheduler* ]]; then
     while true; do
@@ -39,11 +39,14 @@ elif [[ $cmd == *scheduler* ]]; then
         ${python3_path} ${airflow_path} scheduler $2 $3
     done
 elif [[ $cmd == 'quicktest' ]]; then
-    ${python3_path} ${airflow_path} initdb
+    ${python3_path} ${airflow_path} db init
+    ${python3_path} ${airflow_path} db upgrade
+    ${python3_path} ${airflow_path} dags list
     ${python3_path} ${airflow_path} webserver -p 8080 &
-    airflow run example_bash_operator runme_0 2018-01-01
-    airflow backfill example_bash_operator -s 2018-01-01 -e 2018-01-02
-    airflow dag_state example_bash_operator 2018-01-01
+    ${python3_path} ${airflow_path} tasks test example_bash_operator runme_0
+    ${python3_path} ${airflow_path} dags backfill example_bash_operator -s 2018-01-01 -e 2018-01-02
+    ${python3_path} ${airflow_path} tasks run example_bash_operator runme_0 2018-01-01
+    ${python3_path} ${airflow_path} dags state example_bash_operator 2018-01-01
 else
     echo "Invalid Command!"
     exit 1
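For reference, these are the Airflow 1.x to 2.x CLI renames this script (and the rest of this change) adapts to:

    airflow initdb                    ->  airflow db init
    airflow upgradedb                 ->  airflow db upgrade   (idempotent; used by the db-sync job)
    airflow flower                    ->  airflow celery flower
    airflow worker                    ->  airflow celery worker
    airflow list_dags                 ->  airflow dags list
    airflow backfill <dag> ...        ->  airflow dags backfill <dag> ...
    airflow dag_state <dag> <date>    ->  airflow dags state <dag> <date>
    airflow run <dag> <task> <date>   ->  airflow tasks run <dag> <task> <date>
    airflow test <dag> <task> <date>  ->  airflow tasks test <dag> <task> <date>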
@@ -64,12 +64,12 @@ do

     # Get current state of dag using Airflow CLI
     # Use grep to remove logging messages that can pollute the status response
-    check_dag_state=`airflow dag_state ${dag_id} ${dag_execution_date} | grep -vE "DEBUG|INFO|WARN|ERROR"`
+    check_dag_state=$(airflow dags state ${dag_id} ${dag_execution_date} | grep -vE "DEBUG|INFO|WARN|ERROR")
     echo -e ${check_dag_state} >> /usr/local/airflow/upgrade_airflow_worker.log

     # We will need to extract the last word in the 'check_dag_state'
     # string variable as that will contain the status of the dag run
-    dag_state=`echo ${check_dag_state} | awk '{print $NF}'`
+    dag_state=$(echo ${check_dag_state} | awk '{print $NF}')
     echo -e ${dag_state} >> /usr/local/airflow/upgrade_airflow_worker.log

     if [[ $dag_state == "success" ]]; then
@@ -78,7 +78,7 @@ do
         echo -e "Proceeding to upgrade Airflow Worker..." >> /usr/local/airflow/upgrade_airflow_worker.log
         echo -e "Deleting Airflow Worker Pods..." >> /usr/local/airflow/upgrade_airflow_worker.log

-        for i in `kubectl get pods -n ucp | grep -i airflow-worker | awk '{print $1}'`; do
+        for i in $(kubectl get pods -n ucp | grep -i airflow-worker | awk '{print $1}'); do
             # Delete Airflow Worker pod so that they will respawn with the new
             # configurations and/or images
             kubectl delete pod $i -n ucp
images/airflow/webserver_config.py (new file, 10 lines)

@@ -0,0 +1,10 @@
+"""Default configuration for the Airflow webserver."""
+from __future__ import annotations
+import os
+from airflow.www.fab_security.manager import AUTH_DB
+basedir = os.path.abspath(os.path.dirname(__file__))
+WTF_CSRF_ENABLED = True
+WTF_CSRF_TIME_LIMIT = None
+AUTH_TYPE = AUTH_DB
+AUTH_ROLE_ADMIN = 'Admin'
+AUTH_ROLE_PUBLIC = 'Admin'
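A note on these Flask-AppBuilder settings: AUTH_ROLE_PUBLIC is the role granted to unauthenticated sessions, so 'Admin' here effectively opens the Airflow UI without a login prompt (presumably acceptable in this deployment because Shipyard's own Keystone-guarded API, not the Airflow UI, is the intended entry point). A hypothetical locked-down variant would be:

    AUTH_ROLE_PUBLIC = 'Public'  # anonymous users get the empty Public role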
@@ -26,6 +26,9 @@ ENV container docker
 ENV PORT 9000
 ENV LC_ALL C.UTF-8
 ENV LANG C.UTF-8
+
+ENV PYTHONWARNINGS=ignore::DeprecationWarning,ignore::FutureWarning
+
 # Setting the version explicitly for PBR
 ENV PBR_VERSION 0.1a1

@ -112,10 +115,10 @@ COPY ${ctx_base}/shipyard_airflow /home/shipyard/shipyard/
|
|||||||
&& python3 -m pip install -U pip \
|
&& python3 -m pip install -U pip \
|
||||||
&& pip3 install -r /home/shipyard/client_requirements.txt --no-cache-dir \
|
&& pip3 install -r /home/shipyard/client_requirements.txt --no-cache-dir \
|
||||||
&& cd /home/shipyard/shipyard_client \
|
&& cd /home/shipyard/shipyard_client \
|
||||||
&& python3 setup.py install \
|
&& pip3 install . --use-pep517 --no-cache-dir \
|
||||||
&& pip3 install -r /home/shipyard/api_requirements.txt --no-cache-dir \
|
&& pip3 install -r /home/shipyard/api_requirements.txt --no-cache-dir \
|
||||||
&& cd /home/shipyard/shipyard \
|
&& cd /home/shipyard/shipyard \
|
||||||
&& python3 setup.py install \
|
&& pip3 install . --use-pep517 --no-cache-dir \
|
||||||
&& apt-get purge -y --auto-remove $buildDeps \
|
&& apt-get purge -y --auto-remove $buildDeps \
|
||||||
&& apt-get autoremove -yqq --purge \
|
&& apt-get autoremove -yqq --purge \
|
||||||
&& apt-get clean \
|
&& apt-get clean \
|
||||||
@@ -50,7 +50,7 @@ if [ "$1" = 'server' ]; then
         --http :${PORT} \
         --paste config:/etc/shipyard/api-paste.ini \
         --enable-threads \
-        -L \
+        --disable-logging \
         --pyargv "--config-file /etc/shipyard/shipyard.conf" \
         --threads ${SHIPYARD_API_THREADS} \
         --workers ${SHIPYARD_API_WORKERS} \
@@ -1,40 +1,55 @@
-alembic==1.0.1
-apache-airflow[crypto,celery,hive,hdfs,jdbc]==1.10.5
-arrow<=0.17.0
-celery==4.3
-Flask==1.1.0
-Flask-AppBuilder @ git+https://github.com/dpgaspar/Flask-AppBuilder.git@airflow_v1.13.2
-pytz==2018.5
-coverage==5.3
+alembic
+apache-airflow[crypto,celery,hive,hdfs,jdbc,postgres]==2.6.2
+arrow
+celery
+Flask
+Flask-AppBuilder
+pytz
 falcon
-jsonschema<=3.2.0
-keystoneauth1<=5.1.1
-kombu
-kubernetes==26.1.0
-marshmallow-sqlalchemy==0.18.0
+jsonschema
 networkx
-oslo.config<=8.7.1
-oslo.policy<=3.10.1
-oslo.utils<=4.12.3
-psycopg2-binary==2.8.4
-pyarrow==6.0.1
-pylibyaml==0.1.0
-PyYAML<=5.4.1
-regex==2020.11.13
-requests==2.23.0
-responses==0.12.1
-setuptools<=45.2.0
-SQLAlchemy==1.3.15
-tabulate==0.8.03
-ulid==1.1
-uWSGI==2.0.21
-Werkzeug<=0.16.1
-WTForms<=2.3.3
+keystoneauth1==4.3.1
+keystonemiddleware==9.3.0
+kombu
+kubernetes
+marshmallow-sqlalchemy
+mock
+oslo.cache==2.10.0
+oslo.concurrency==4.2.0
+oslo.config==8.6.0
+oslo.context==4.0.0
+oslo.db==11.2.0
+oslo.i18n==6.0.0
+oslo.log==4.8.0
+oslo.messaging==12.13.1
+oslo.metrics==0.4.0
+oslo.middleware==4.4.0
+oslo.policy==3.10.1
+oslo.serialization==4.1.0
+oslo.service==2.8.0
+oslo.upgradecheck==2.1.1
+oslo.utils==4.12.3
+oslo.versionedobjects==2.4.0
+psycopg2-binary
+pyarrow
+pylibyaml
+python3-memcached
+PyYAML
+regex
+requests
+responses
+setuptools
+SQLAlchemy
+tabulate
+ulid
+uWSGI
+Werkzeug
+WTForms

-itsdangerous==2.0.1
+itsdangerous

-git+https://opendev.org/airship/deckhand.git@06b1631d8a31a1e7147e8751025d0ef2ff4b31f5#egg=deckhand
-git+https://opendev.org/airship/drydock.git@73d22a488f7f5b7217e86e86da9f38b5b9085009#egg=drydock_provisioner&subdirectory=python
-git+https://opendev.org/airship/armada.git@812546c87595b1d8a1bc0da931d644473b03b31f#egg=armada
-git+https://opendev.org/airship/promenade.git@2f2455f0d159287565a279a99c0bbeb235e81ffb#egg=promenade
+git+https://opendev.org/airship/deckhand.git@37ba567da5bf7f146f54a468478a025c70793574#egg=deckhand
+git+https://opendev.org/airship/drydock.git@f99abfa4337f8cbb591513aac404b11208d4187c#egg=drydock_provisioner&subdirectory=python
+git+https://opendev.org/airship/armada.git@6595dd83ea65324196c89cf6fb83f168818822de#egg=armada
+git+https://opendev.org/airship/promenade.git@69a74590e76e810916f7780fc525c63ec58c7dc1#egg=promenade
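Per the commit message, the loosened pins above lean on the upstream Airflow constraints mechanism for version selection; a hypothetical way to reproduce the same resolution by hand would be:

    pip install 'apache-airflow[celery,postgres]==2.6.2' \
        --constraint 'https://raw.githubusercontent.com/apache/airflow/constraints-2.6.2/constraints-3.8.txt'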
(entire file deleted)

@@ -1,241 +0,0 @@
-alabaster==0.7.13
-alembic==1.0.1
-amqp==2.6.1
-apache-airflow==1.10.5
-apispec==1.3.3
-argcomplete==3.0.8
-Armada @ git+https://opendev.org/airship/armada.git@812546c87595b1d8a1bc0da931d644473b03b31f
-arrow==0.17.0
-attrs==22.2.0
-autopage==0.5.1
-Babel==2.11.0
-bcrypt==4.0.1
-Beaker==1.12.0
-billiard==3.6.4.0
-cached-property==1.5.2
-cachetools==4.2.4
-celery==4.3.0
-certifi==2023.5.7
-cffi==1.15.1
-chardet==3.0.4
-charset-normalizer==3.0.1
-click==7.1.2
-cliff==3.10.1
-cmd2==2.4.3
-colorama==0.4.5
-colorlog==4.0.2
-configparser==3.5.3
-coverage==5.3
-croniter==0.3.37
-cryptography==3.4.8
-debtcollector==2.5.0
-Deckhand @ git+https://opendev.org/airship/deckhand.git@06b1631d8a31a1e7147e8751025d0ef2ff4b31f5
-decorator==4.4.2
-deepdiff==5.7.0
-defusedxml==0.7.1
-dill==0.2.9
-distlib==0.3.6
-dnspython==2.2.1
-docutils==0.17
-dogpile.cache==1.2.1
-drydock-provisioner @ git+https://opendev.org/airship/drydock.git@73d22a488f7f5b7217e86e86da9f38b5b9085009#subdirectory=python
-dulwich==0.20.50
-dumb-init==1.2.5.post1
-eventlet==0.33.3
-extras==1.0.0
-falcon==3.1.1
-fasteners==0.18
-filelock==3.4.1
-fixtures==3.0.0
-flake8==3.8.4
-Flask==1.1.0
-Flask-Admin==1.5.3
-Flask-AppBuilder @ git+https://github.com/dpgaspar/Flask-AppBuilder.git@b71789d85b632935eca79c0b53fb77f20bf17fe6
-Flask-Babel==0.12.2
-Flask-Caching==1.3.3
-Flask-JWT-Extended==3.25.1
-Flask-Login==0.4.1
-Flask-OpenID==1.3.0
-Flask-SQLAlchemy==2.5.1
-flask-swagger==0.2.13
-Flask-WTF==0.14.3
-flower==0.9.7
-funcsigs==1.0.0
-future==0.16.0
-futurist==2.4.1
-gitdb==4.0.9
-GitPython==3.1.18
-google-auth==2.19.0
-greenlet==2.0.2
-gunicorn==19.10.0
-hacking==4.1.0
-hmsclient==0.1.1
-html5lib==0.9999999
-httpexceptor==1.4.0
-humanize==3.14.0
-idna==2.10
-imagesize==1.4.1
-importlib-metadata==2.1.3
-importlib-resources==5.4.0
-iso8601==1.1.0
-itsdangerous==2.0.1
-JayDeBeApi==1.2.3
-Jinja2==2.10.3
-JPype1==1.3.0
-json-merge-patch==0.2
-jsonpath-ng==1.5.3
-jsonpath-rw==1.4.0
-jsonpath-rw-ext==1.2.2
-jsonpickle==1.4.1
-jsonschema==3.2.0
-keystoneauth1==5.1.1
-keystonemiddleware==9.5.0
-kombu==4.6.3
-kubernetes==26.1.0
-lazy-object-proxy==1.7.1
-lockfile==0.12.2
-Mako==1.1.6
-Markdown==2.6.11
-MarkupSafe==2.0.1
-marshmallow==2.19.5
-marshmallow-enum==1.5.1
-marshmallow-sqlalchemy==0.18.0
-mccabe==0.6.1
-mock==5.0.2
-msgpack==1.0.5
-natsort==8.2.0
-netaddr==0.8.0
-netifaces==0.11.0
-networkx==2.5.1
-nose==1.3.7
-numpy==1.19.5
-oauthlib==3.2.2
-ordered-set==4.0.2
-ordereddict==1.1
-os-service-types==1.7.0
-oslo.cache==2.10.1
-oslo.concurrency==4.5.1
-oslo.config==8.7.1
-oslo.context==4.1.0
-oslo.db==10.0.0
-oslo.i18n==5.1.0
-oslo.log==4.6.0
-oslo.messaging==10.3.0
-oslo.middleware==4.4.0
-oslo.policy==3.10.1
-oslo.serialization==4.2.0
-oslo.service==2.8.0
-oslo.utils==4.12.3
-packaging==21.3
-pandas==0.25.3
-Paste==3.5.0
-PasteDeploy==2.1.1
-PasteScript==3.3.0
-pbr==5.5.1
-pendulum==1.4.4
-pip==21.3.1
-platformdirs==2.4.0
-ply==3.11
-prettytable==2.5.0
-prison==0.1.0
-promenade @ git+https://opendev.org/airship/promenade.git@2f2455f0d159287565a279a99c0bbeb235e81ffb
-prometheus-client==0.8.0
-protobuf==3.19.6
-psutil==5.9.5
-psycopg2-binary==2.8.4
-py==1.11.0
-pyarrow==6.0.1
-pyasn1==0.5.0
-pyasn1-modules==0.3.0
-pycadf==3.1.1
-pycodestyle==2.6.0
-pycparser==2.21
-pyflakes==2.2.0
-Pygments==2.14.0
-PyHive==0.6.5
-PyJWT==1.7.1
-pylibyaml==0.1.0
-pymongo==4.1.1
-pyparsing==2.4.7
-pyperclip==1.8.2
-pyproject-api==0.1.0
-pyrsistent==0.18.0
-python-barbicanclient==5.2.0
-python-daemon==2.1.2
-python-dateutil==2.8.1
-python-editor==1.0.4
-python-keystoneclient==4.5.0
-python-memcached==1.59
-python-mimeparse==1.6.0
-python-subunit==1.4.0
-python3-openid==3.2.0
-pytz==2018.5
-pytzdata==2020.1
-PyYAML==5.4.1
-regex==2020.11.13
-reno==4.0.0
-repoze.lru==0.7
-requests==2.23.0
-requests-oauthlib==1.3.1
-resolver==0.2.1
-responses==0.12.1
-retry==0.9.2
-rfc3986==1.5.0
-Routes==2.5.1
-rsa==4.9
-selector==0.10.1
-setproctitle==1.2.3
-setuptools==45.2.0
-simplejson==3.19.1
-six==1.16.0
-smmap==5.0.0
-snakebite==2.11.0
-snowballstemmer==2.2.0
-Sphinx==3.3.1
-sphinx-rtd-theme==0.5.0
-sphinxcontrib-applehelp==1.0.2
-sphinxcontrib-devhelp==1.0.2
-sphinxcontrib-htmlhelp==2.0.0
-sphinxcontrib-jsmath==1.0.1
-sphinxcontrib-qthelp==1.0.3
-sphinxcontrib-serializinghtml==1.1.5
-SQLAlchemy==1.3.15
-sqlalchemy-migrate==0.13.0
-sqlparse==0.4.4
-statsd==4.0.1
-stevedore==3.5.2
-tabulate==0.8.3
-Tempita==0.5.2
-tenacity==4.12.0
-termcolor==1.1.0
-testrepository==0.0.20
-testresources==2.0.1
-testscenarios==0.5.0
-testtools==2.5.0
-text-unidecode==1.2
-thrift==0.16.0
-tiddlyweb==2.4.3
-tomli==1.2.3
-tomlkit==0.11.6
-tornado==5.1.1
-typing-extensions==3.7.4.3
-tzlocal==1.5.1
-ulid==1.1
-unicodecsv==0.14.1
-urllib3==1.25.11
-uWSGI==2.0.21
-vine==1.3.0
-virtualenv==20.16.2
-wcwidth==0.2.6
-WebOb==1.8.7
-websocket-client==1.3.1
-Werkzeug==0.16.1
-wheel==0.37.1
-wrapt==1.15.0
-wsgi-intercept==1.11.0
-WTForms==2.3.3
-xmltodict==0.13.0
-yappi==1.4.0
-yq==3.2.1
-zipp==3.6.0
-zope.deprecation==4.4.0
@@ -1,240 +1,269 @@
-alabaster==0.7.13
-alembic==1.0.1
-amqp==2.6.1
-apache-airflow==1.10.5
-apispec==1.3.3
-argcomplete==3.0.8
-Armada @ git+https://opendev.org/airship/armada.git@4be7fa44a743fbcdf792e2bdca3af6c95b540f97
-arrow==0.17.0
+aiohttp==3.8.4
+aiosignal==1.3.1
+alembic==1.11.1
+amqp==5.1.1
+anyio==3.7.0
+apache-airflow==2.6.2
+apache-airflow-providers-apache-hdfs==4.0.0
+apache-airflow-providers-apache-hive==6.1.0
+apache-airflow-providers-celery==3.2.0
+apache-airflow-providers-common-sql==1.5.1
+apache-airflow-providers-ftp==3.4.1
+apache-airflow-providers-http==4.4.1
+apache-airflow-providers-imap==3.2.1
+apache-airflow-providers-jdbc==3.4.0
+apache-airflow-providers-postgres==5.5.0
+apache-airflow-providers-sqlite==3.4.1
+apispec==5.2.2
+argcomplete==3.1.1
+Armada @ git+https://opendev.org/airship/armada.git@6595dd83ea65324196c89cf6fb83f168818822de
+arrow==1.2.3
+asgiref==3.7.2
+async-timeout==4.0.2
 attrs==23.1.0
 autopage==0.5.1
 Babel==2.12.1
+backports.zoneinfo==0.2.1
+barbican==14.0.2
 bcrypt==4.0.1
-Beaker==1.12.0
-billiard==3.6.4.0
-cached-property==1.5.2
-cachetools==5.3.0
-celery==4.3.0
+Beaker==1.12.1
+billiard==4.1.0
+blinker==1.6.2
+cachelib==0.9.0
+cachetools==5.3.1
+castellan==4.1.0
+cattrs==23.1.2
+celery==5.3.0
 certifi==2023.5.7
 cffi==1.15.1
-chardet==3.0.4
 charset-normalizer==3.1.0
-click==7.1.2
-cliff==3.10.1
+click==8.1.3
+click-didyoumean==0.3.0
+click-plugins==1.1.1
+click-repl==0.2.0
+clickclick==20.10.2
+cliff==4.3.0
 cmd2==2.4.3
 colorama==0.4.6
-colorlog==4.0.2
-configparser==3.5.3
-coverage==5.3
-croniter==0.3.37
-cryptography==3.4.8
+colorlog==4.8.0
+ConfigUpdater==3.1.1
+connexion==2.14.2
+cron-descriptor==1.4.0
+croniter==1.3.15
+cryptography==40.0.2
 debtcollector==2.5.0
-Deckhand @ git+https://opendev.org/airship/deckhand.git@3a06b1b6040865483a09514e2bf7dc5fae24560e
+Deckhand @ git+https://opendev.org/airship/deckhand.git@37ba567da5bf7f146f54a468478a025c70793574
 decorator==5.1.1
-deepdiff==5.8.1
-defusedxml==0.7.1
-dill==0.2.9
-distlib==0.3.6
+deepdiff==6.3.1
+Deprecated==1.2.14
+dill==0.3.1.1
 dnspython==2.3.0
-docutils==0.17
-dogpile.cache==1.2.0
-drydock-provisioner @ git+https://opendev.org/airship/drydock.git@375abedb8aa3a413ca70a47aef467f39d65e1aee#subdirectory=python
-dulwich==0.21.5
-dumb-init==1.2.5.post1
+docopt==0.6.2
+docutils==0.20.1
+dogpile.cache==1.2.2
+drydock-provisioner @ git+https://opendev.org/airship/drydock.git@f99abfa4337f8cbb591513aac404b11208d4187c#subdirectory=python
+email-validator==1.3.1
 eventlet==0.33.3
-extras==1.0.0
+exceptiongroup==1.1.1
 falcon==3.1.1
+fastavro==1.7.4
 fasteners==0.18
-filelock==3.12.0
-fixtures==3.0.0
-flake8==3.8.4
-Flask==1.1.0
-Flask-Admin==1.5.3
-Flask-AppBuilder @ git+https://github.com/dpgaspar/Flask-AppBuilder.git@b71789d85b632935eca79c0b53fb77f20bf17fe6
-Flask-Babel==0.12.2
-Flask-Caching==1.3.3
-Flask-JWT-Extended==3.25.1
-Flask-Login==0.4.1
-Flask-OpenID==1.3.0
+fixtures==4.1.0
+Flask==2.2.5
+Flask-AppBuilder==4.3.1
+Flask-Babel==2.0.0
+Flask-Caching==2.0.2
+Flask-JWT-Extended==4.5.2
+Flask-Limiter==3.3.1
+Flask-Login==0.6.2
+Flask-Session==0.5.0
 Flask-SQLAlchemy==2.5.1
-flask-swagger==0.2.13
-Flask-WTF==0.14.3
-flower==0.9.7
-funcsigs==1.0.0
-future==0.16.0
+Flask-WTF==1.1.1
+flower==1.2.0
+frozenlist==1.3.3
+future==0.18.3
 futurist==2.4.1
 gitdb==4.0.10
 GitPython==3.1.31
-google-auth==2.18.0
+google-auth==2.20.0
+graphviz==0.20.1
 greenlet==2.0.2
-gunicorn==19.10.0
-hacking==4.1.0
+gssapi==1.8.2
+gunicorn==20.1.0
+h11==0.14.0
+hdfs==2.7.0
 hmsclient==0.1.1
 html5lib==0.9999999
+httpcore==0.16.3
 httpexceptor==1.4.0
+httpx==0.23.3
 humanize==4.6.0
-idna==2.10
-imagesize==1.4.1
-importlib-metadata==2.1.3
-iso8601==1.1.0
-itsdangerous==2.0.1
+idna==3.4
+importlib-metadata==4.13.0
+importlib-resources==5.12.0
+inflection==0.5.1
+iso8601==2.0.0
+itsdangerous==2.1.2
 JayDeBeApi==1.2.3
-Jinja2==2.10.3
+Jinja2==3.1.2
 JPype1==1.4.1
-json-merge-patch==0.2
 jsonpath-ng==1.5.3
-jsonpath-rw==1.4.0
-jsonpath-rw-ext==1.2.2
-jsonpickle==1.4.1
-jsonschema==3.2.0
-keystoneauth1==5.1.1
-keystonemiddleware==10.2.0
-kombu==4.6.3
-kubernetes==26.1.0
+jsonpickle==3.0.1
+jsonschema==4.17.3
+keystoneauth1==4.3.1
+keystonemiddleware==9.3.0
+kombu==5.3.0
+krb5==0.5.0
+kubernetes==23.6.0
 lazy-object-proxy==1.9.0
+ldap3==2.9.1
+limits==3.5.0
+linkify-it-py==2.0.2
 lockfile==0.12.2
+logutils==0.3.5
 Mako==1.2.4
-Markdown==2.6.11
-MarkupSafe==2.0.1
-marshmallow==2.19.5
+Markdown==3.4.3
+markdown-it-py==3.0.0
+MarkupSafe==2.1.3
+marshmallow==3.19.0
 marshmallow-enum==1.5.1
-marshmallow-sqlalchemy==0.18.0
-mccabe==0.6.1
-mock==5.0.2
+marshmallow-oneofschema==3.0.1
+marshmallow-sqlalchemy==0.26.1
+mdit-py-plugins==0.4.0
+mdurl==0.1.2
+mock==5.1.0
 msgpack==1.0.5
-natsort==8.3.1
+multidict==6.0.4
 netaddr==0.8.0
 netifaces==0.11.0
 networkx==3.1
-nose==1.3.7
 numpy==1.24.3
 oauthlib==3.2.2
 ordered-set==4.1.0
-ordereddict==1.1
 os-service-types==1.7.0
-oslo.cache==2.10.1
-oslo.concurrency==5.1.1
-oslo.config==8.7.1
-oslo.context==4.1.0
-oslo.db==10.0.0
+oslo.cache==2.10.0
+oslo.concurrency==4.2.0
+oslo.config==8.6.0
+oslo.context==4.0.0
+oslo.db==11.2.0
 oslo.i18n==6.0.0
-oslo.log==4.6.0
-oslo.messaging==10.3.0
+oslo.log==4.8.0
+oslo.messaging==12.13.1
+oslo.metrics==0.4.0
 oslo.middleware==4.4.0
 oslo.policy==3.10.1
-oslo.serialization==4.2.0
-oslo.service==3.1.1
+oslo.serialization==4.1.0
+oslo.service==2.8.0
+oslo.upgradecheck==2.1.1
 oslo.utils==4.12.3
+oslo.versionedobjects==2.4.0
 packaging==21.3
-pandas==0.25.3
-Paste==3.5.0
+pandas==1.5.3
+Paste==3.5.3
 PasteDeploy==3.0.1
 PasteScript==3.3.0
-pbr==5.5.1
-pendulum==1.4.4
+pathspec==0.9.0
+pbr==5.11.1
+pecan==1.5.1
+pendulum==2.1.2
 pip==23.1.2
-platformdirs==3.5.1
+pkgutil_resolve_name==1.3.10
+pluggy==1.0.0
 ply==3.11
-prettytable==3.7.0
-prison==0.1.0
-promenade @ git+https://opendev.org/airship/promenade.git@45bcba068eb42fe6ba54d494b12122600dcb2c6c
-prometheus-client==0.8.0
-protobuf==4.23.0
+prettytable==3.8.0
+prison==0.2.1
+promenade @ git+https://opendev.org/airship/promenade.git@69a74590e76e810916f7780fc525c63ec58c7dc1
+prometheus-client==0.17.0
+prompt-toolkit==3.0.38
 psutil==5.9.5
-psycopg2-binary==2.8.4
+psycopg2-binary==2.9.6
+pure-sasl==0.6.2
 py==1.11.0
-pyarrow==6.0.1
-pyasn1==0.5.0
-pyasn1-modules==0.3.0
+pyarrow==11.0.0
+pyasn1==0.4.8
+pyasn1-modules==0.2.8
 pycadf==3.1.1
-pycodestyle==2.6.0
 pycparser==2.21
-pyflakes==2.2.0
-Pygments==2.14.0
+pydantic==1.10.9
+Pygments==2.15.1
 PyHive==0.6.5
-PyJWT==1.7.1
+PyJWT==2.7.0
 pylibyaml==0.1.0
-pymongo==4.3.3
-pyparsing==2.4.7
+pyOpenSSL==23.2.0
+pyparsing==3.0.9
 pyperclip==1.8.2
-pyproject_api==1.5.0
 pyrsistent==0.19.3
-python-barbicanclient==5.2.0
-python-daemon==2.1.2
-python-dateutil==2.8.1
-python-editor==1.0.4
+pyspnego==0.9.1
+python-barbicanclient==5.4.0
+python-daemon==3.0.1
+python-dateutil==2.8.2
 python-keystoneclient==5.1.0
 python-memcached==1.59
 python-mimeparse==1.6.0
-python-subunit==1.4.0
-python3-openid==3.2.0
-pytz==2018.5
+python-nvd3==0.15.0
+python-slugify==8.0.1
+python3-memcached==1.51
+pytz==2023.3
 pytzdata==2020.1
-PyYAML==5.4.1
-regex==2020.11.13
-reno==4.0.0
+PyYAML==6.0
+regex==2023.6.3
 repoze.lru==0.7
-requests==2.23.0
+requests==2.31.0
+requests-kerberos==0.14.0
 requests-oauthlib==1.3.1
+requests-toolbelt==1.0.0
 resolver==0.2.1
-responses==0.12.1
+responses==0.23.1
 retry==0.9.2
-rfc3986==2.0.0
+rfc3339-validator==0.1.4
+rfc3986==1.5.0
+rich==13.4.2
+rich_argparse==1.1.1
 Routes==2.5.1
 rsa==4.9
+sasl==0.3.1
 selector==0.10.1
 setproctitle==1.3.2
-setuptools==45.2.0
+setuptools==67.7.2
 simplejson==3.19.1
 six==1.16.0
 smmap==5.0.0
-snakebite==2.11.0
-snowballstemmer==2.2.0
-Sphinx==3.3.1
-sphinx-rtd-theme==0.5.0
-sphinxcontrib-applehelp==1.0.4
-sphinxcontrib-devhelp==1.0.2
-sphinxcontrib-htmlhelp==2.0.1
-sphinxcontrib-jsmath==1.0.1
-sphinxcontrib-qthelp==1.0.3
-sphinxcontrib-serializinghtml==1.1.5
-SQLAlchemy==1.3.15
+sniffio==1.3.0
+SQLAlchemy==1.4.48
+SQLAlchemy-JSONField==1.0.1.post0
 sqlalchemy-migrate==0.13.0
+SQLAlchemy-Utils==0.41.1
 sqlparse==0.4.4
 statsd==4.0.1
 stevedore==5.1.0
-tabulate==0.8.3
+tabulate==0.9.0
 Tempita==0.5.2
-tenacity==4.12.0
-termcolor==1.1.0
-testrepository==0.0.20
+tenacity==8.2.2
+termcolor==2.3.0
 testresources==2.0.1
 testscenarios==0.5.0
-testtools==2.5.0
-text-unidecode==1.2
+testtools==2.6.0
+text-unidecode==1.3
 thrift==0.16.0
+thrift-sasl==0.4.3
 tiddlyweb==2.4.3
-tomli==2.0.1
-tomlkit==0.11.8
-tornado==5.1.1
-typing-extensions==3.7.2
-tzlocal==1.5.1
+tornado==6.3.2
+types-PyYAML==6.0.12.10
+typing_extensions==4.6.3
+tzdata==2023.3
+uc-micro-py==1.0.2
 ulid==1.1
 unicodecsv==0.14.1
-urllib3==1.25.11
-uWSGI==2.0.21
-vine==1.3.0
-virtualenv==20.23.0
+urllib3==1.26.16
+uWSGI==2.0.22
+vine==5.0.0
 wcwidth==0.2.6
 WebOb==1.8.7
-websocket-client==1.5.1
-Werkzeug==0.16.1
+websocket-client==1.5.3
+Werkzeug==2.2.3
 wheel==0.40.0
 wrapt==1.15.0
-wsgi-intercept==1.11.0
-WTForms==2.3.3
-xmltodict==0.13.0
+WTForms==3.0.1
 yappi==1.4.0
-yq==3.2.1
+yarl==1.9.2
 zipp==3.15.0
-zope.deprecation==4.4.0
@@ -336,8 +336,9 @@ class ActionsResource(BaseResource):
         # "conf" - JSON string that gets pickled into the DagRun's
         #     conf attribute. The conf is passed as as a string of escaped
         #     json inside the json payload accepted by the API.
-        conf_value = self.to_json({'action': action})
-        payload = {'run_id': action['id'], 'conf': conf_value}
+        # conf_value = self.to_json({'action': action})
+        payload = {'run_id': action['id'], 'conf': {'action': action}}
+        LOG.info('Request payload: %s', payload)
         try:
             resp = requests.post(req_url, timeout=(c_timeout, r_timeout),
                                  headers=headers, json=payload)
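The substance of this hunk: the conf value is now posted as a plain JSON object rather than a pre-serialized JSON string, letting the Airflow API store it on the DagRun directly. Assuming the endpoint this code targets, the request body now looks roughly like (values hypothetical):

    {
        "run_id": "<action id>",
        "conf": {"action": {"id": "<action id>", "...": "..."}}
    }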
@@ -89,12 +89,12 @@ class ActionsIdResource(BaseResource):
             action['notes'].append(note.view())
         return action

-    def get_dag_run_by_id(self, dag_id, execution_date):
+    def get_dag_run_by_id(self, dag_id, run_id):
         """
         Wrapper for call to the airflow db to get a dag_run
         :returns: a dag run dictionary
         """
-        dag_run_list = self.get_dag_run_db(dag_id, execution_date)
+        dag_run_list = self.get_dag_run_db(dag_id, run_id)
         # should be only one result, return the first one
         if dag_run_list:
             return dag_run_list[0]
@@ -76,7 +76,7 @@ class WorkflowIdResource(BaseResource):
        """
        Retrieve a workflow by id,
        :param helper: The WorkflowHelper constructed for this invocation
-       :param workflow_id: a string in {dag_id}__{execution_date} format
+       :param workflow_id: a string in {dag_id}__{run_id} format
                            identifying a workflow
        :returns: a workflow detail dictionary including steps
        """
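A small sketch of composing and splitting the new {dag_id}__{run_id} workflow id; the sample values are assumptions for illustration, not taken from the Shipyard helpers themselves:

dag_id = 'deploy_site'
run_id = 'manual__2017-10-09T21:19:03'

workflow_id = '{}__{}'.format(dag_id, run_id)

# Split on the first '__' only, since run_id values such as
# 'manual__...' contain '__' themselves.
parsed_dag_id, parsed_run_id = workflow_id.split('__', 1)
assert (parsed_dag_id, parsed_run_id) == (dag_id, run_id)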
@@ -19,7 +19,7 @@ to the api startup to handle the Falcon specific setup.
 import logging

 from oslo_config import cfg
-from werkzeug.contrib.profiler import ProfilerMiddleware
+from werkzeug.middleware.profiler import ProfilerMiddleware

 from shipyard_airflow.conf import config
 import shipyard_airflow.control.api as api
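The import moves because werkzeug.contrib was dropped in Werkzeug 1.0; the profiler now lives in werkzeug.middleware.profiler. A minimal sketch with a stand-in WSGI app:

from werkzeug.middleware.profiler import ProfilerMiddleware


def app(environ, start_response):
    # placeholder WSGI callable standing in for the Shipyard app
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'ok']


# Per-request profiles are printed to stdout by default; pass
# profile_dir to write .prof files instead.
app = ProfilerMiddleware(app, restrictions=(10,))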
@@ -16,8 +16,14 @@
 """
 import airflow
 from airflow import DAG
-from airflow.operators import OpenStackOperator
 from datetime import timedelta
+try:
+    from airflow.operators import OpenStackOperator
+    from config_path import config_path
+except ImportError:
+    from shipyard_airflow.plugins.openstack_operators import \
+        OpenStackOperator
+    from shipyard_airflow.dags.config_path import config_path


 default_args = {
@@ -31,10 +37,11 @@ default_args = {
     'retry_delay': timedelta(minutes=1),
 }

-dag = DAG('openstack_cli', default_args=default_args, schedule_interval=None)
+dag = DAG('openstack_cli', default_args=default_args,
+          schedule_interval=None)

-# Location of shipyard.conf
-config_path = '/usr/local/airflow/plugins/shipyard.conf'
+# # Location of shipyard.conf
+# config_path = '/usr/local/airflow/plugins/shipyard.conf'

 # Note that the shipyard.conf file needs to be placed on a volume
 # that can be accessed by the containers
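The try/except exists because Airflow 2.x no longer republishes plugin operators under the auto-generated airflow.operators namespace; they must be imported from their real package path. A trimmed sketch of the pattern the fallback targets; default_args here is a stand-in and the operator's own arguments are omitted:

from datetime import timedelta

from airflow import DAG
from shipyard_airflow.plugins.openstack_operators import OpenStackOperator

default_args = {'retries': 1, 'retry_delay': timedelta(minutes=1)}

with DAG('openstack_cli', default_args=default_args,
         schedule_interval=None) as dag:
    # tasks would instantiate OpenStackOperator(...) here; its specific
    # arguments come from the plugin and are left out of this sketch
    pass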
@@ -37,9 +37,9 @@ class AirflowDbAccess(DbAccess):
         SELECT
             "id",
             "dag_id",
-            "execution_date",
             "state",
             "run_id",
+            "execution_date",
             "external_trigger",
             "conf",
             "end_date",
@@ -52,9 +52,9 @@ class AirflowDbAccess(DbAccess):
         SELECT
             "id",
             "dag_id",
-            "execution_date",
             "state",
             "run_id",
+            "execution_date",
             "external_trigger",
             "conf",
             "end_date",
@@ -73,9 +73,9 @@ class AirflowDbAccess(DbAccess):
         SELECT
             "id",
             "dag_id",
-            "execution_date",
             "state",
             "run_id",
+            "execution_date",
             "external_trigger",
             "conf",
             "end_date",
@@ -93,6 +93,7 @@ class AirflowDbAccess(DbAccess):
             "task_id",
             "dag_id",
             "execution_date",
+            "dr"."run_id",
             "start_date",
             "end_date",
             "duration",
@@ -109,7 +110,19 @@ class AirflowDbAccess(DbAccess):
             "pid",
             "max_tries"
         FROM
-            task_instance
+            task_instance ti
+        INNER JOIN
+            (
+            SELECT
+                "execution_date",
+                "run_id"
+            FROM
+                dag_run
+            GROUP BY
+                run_id,
+                execution_date
+            ) dr
+        ON ti.run_id=dr.run_id
         ORDER BY
             priority_weight desc,
             start_date
@@ -119,33 +132,48 @@ class AirflowDbAccess(DbAccess):
     # used to merge into this query.
     SELECT_TASKS_BY_ID = sqlalchemy.sql.text('''
         SELECT
             "task_id",
             "dag_id",
             "execution_date",
+            "dr"."run_id",
             "start_date",
             "end_date",
             "duration",
             "state",
             "try_number",
             "hostname",
             "unixname",
             "job_id",
             "pool",
             "queue",
             "priority_weight",
             "operator",
             "queued_dttm",
             "pid",
             "max_tries"
         FROM
-            task_instance
+            task_instance ti
+        INNER JOIN
+            (
+            SELECT
+                "execution_date",
+                "run_id"
+            FROM
+                dag_run
+            GROUP BY
+                run_id,
+                execution_date
+            ) dr
+        ON
+            ti.run_id=dr.run_id
         WHERE
             dag_id LIKE :dag_id
         AND
             execution_date = :execution_date
         ORDER BY
             priority_weight desc,
             start_date
+        LIMIT 1
     ''')

     UPDATE_DAG_RUN_STATUS = sqlalchemy.sql.text('''
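The reshaped queries reflect the Airflow 2.x schema, where task_instance rows no longer carry their own execution_date and must be joined to dag_run on run_id. A sketch of running such a query with SQLAlchemy; the engine URL is an assumption:

import sqlalchemy

engine = sqlalchemy.create_engine(
    'postgresql://airflow:password@localhost:5432/airflow')  # assumed DSN

query = sqlalchemy.sql.text('''
    SELECT ti.task_id, ti.state, dr.execution_date
    FROM task_instance ti
    INNER JOIN dag_run dr ON ti.run_id = dr.run_id
    WHERE ti.dag_id LIKE :dag_id
''')

with engine.connect() as connection:
    for row in connection.execute(query, {'dag_id': 'deploy_site%'}):
        print(row)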
@@ -53,7 +53,7 @@ class DbAccess:
        """
        Unimplemented method for use in overriding to perform db updates
        """
-        LOG.info('No databse version updates specified for %s',
+        LOG.info('No database version updates specified for %s',
                  self.__class__.__name__)

     def get_engine(self):
@@ -81,7 +81,6 @@ class ArmadaBaseOperator(UcpBaseOperator):
-
         # Logs uuid of action performed by the Operator
         LOG.info("Armada Operator for action %s", self.action_id)

         # Set up armada client
         self.armada_client = self._init_armada_client(
             self.endpoints.endpoint_by_name(service_endpoint.ARMADA),
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from datetime import datetime
+from datetime import timezone
 import logging
 # Using nosec to prevent Bandit blacklist reporting. Subprocess is used
 # in a controlled way as part of this operator.
@@ -43,7 +44,8 @@ class DeckhandCreateSiteActionTagOperator(DeckhandBaseOperator):
     def do_execute(self):

         # Calculate total elapsed time for workflow
-        time_delta = datetime.now() - self.task_instance.execution_date
+        time_delta = datetime.now(timezone.utc) \
+            - self.task_instance.execution_date

         hours, remainder = divmod(time_delta.seconds, 3600)
         minutes, seconds = divmod(remainder, 60)
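The timezone.utc argument matters because Airflow 2.x hands back timezone-aware (UTC) timestamps, and Python refuses to subtract an aware datetime from a naive one:

from datetime import datetime, timedelta, timezone

execution_date = datetime(2023, 7, 1, 12, 0, tzinfo=timezone.utc)

try:
    datetime.now() - execution_date  # naive minus aware
except TypeError as err:
    print(err)  # can't subtract offset-naive and offset-aware datetimes

time_delta = datetime.now(timezone.utc) - execution_date  # both aware
assert isinstance(time_delta, timedelta)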
@@ -88,7 +90,8 @@ class DeckhandCreateSiteActionTagOperator(DeckhandBaseOperator):
         # container.
         response = subprocess.run(  # nosec
             ['airflow',
-             'task_state',
+             'tasks',
+             'state',
              self.main_dag_name,
              task_id,
              execution_date],
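In Airflow 2.x the flat task_state command became a subcommand of the tasks group. A sketch of the equivalent standalone call, with illustrative DAG, task, and date values:

import subprocess  # nosec - fixed argument list, no shell involved

result = subprocess.run(  # nosec
    ['airflow', 'tasks', 'state',
     'deploy_site', 'preflight', '2018-01-01'],
    capture_output=True, text=True, check=False)
print(result.stdout.strip())  # e.g. 'success'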
@@ -1,13 +1,14 @@
 bandit==1.6.0
-flake8==3.8.4
+flake8
-pytest==3.5.0
+pytest==7.4.0
-pytest-cov==2.5.1
+pytest-cov==4.1.0
+coverage==5.3
 testfixtures
 tox<=4.0.0
-typing-extensions==3.7.2
+# typing-extensions==3.7.2
-pyflakes>=2.1.1
+pyflakes
 # Pin apache-airflow 1.10.5 to align with version in airflow images requirements.txt
-apache-airflow[crypto,celery,hive,hdfs,jdbc]==1.10.5
+# apache-airflow[crypto,celery,hive,hdfs,jdbc]==1.10.5
 # install postgres separately as apache-airflow 1.10.5 forces postgres to use
 # psycopg2 instead of psycopg2-binary which requires additional apt packages
 # to be installed, i.e. postgres-devel. Otherwise the following error is seen:
@@ -236,12 +236,12 @@ class TestDeploymentGroup:
         with pytest.raises(InvalidDeploymentGroupNodeLookupError) as err:
             DeploymentGroup(yaml.safe_load(_GROUP_YAML_1),
                             broken_node_lookup_1)
-        assert str(err).endswith("is not an iterable")
+        assert err.match("is not an iterable")

         with pytest.raises(InvalidDeploymentGroupNodeLookupError) as err:
             DeploymentGroup(yaml.safe_load(_GROUP_YAML_1),
                             broken_node_lookup_2)
-        assert str(err).endswith("is not all strings")
+        assert err.match("is not all strings")

     def test_set_stage(self):
         dg = DeploymentGroup(yaml.safe_load(_GROUP_YAML_ALL_SELECTOR),
@@ -269,7 +269,7 @@ class TestStage:
                       Stage.PREPARED]
         with pytest.raises(DeploymentGroupStageError) as de:
             Stage.previous_stage('Chickens and Turkeys')
-        assert str(de).endswith("Chickens and Turkeys is not a valid stage")
+        assert de.match("Chickens and Turkeys is not a valid stage")


 class TestCheckLabelFormat:
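The switch to err.match is a correctness fix, not just style: pytest.raises yields an ExceptionInfo object whose str() is a repr carrying traceback metadata, so endswith() on it never sees the message, while ExceptionInfo.match() applies re.search to the string form of the exception itself:

import pytest

with pytest.raises(ValueError) as err:
    raise ValueError("lookup result is not an iterable")

assert err.match("is not an iterable")  # regex search on str(exception)
assert not str(err).endswith("is not an iterable")  # repr, not the message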
@@ -92,7 +92,8 @@ def test__get_threshold_date():
        arrow.get('2017-10-09T10:00:00.00000').naive
    )

-
+RUN_ID_ONE = "AAAAAAAAAAAAAAAAAAAAA"
+RUN_ID_TWO = "BBBBBBBBBBBBBBBBBBBBB"
 DATE_ONE = datetime(2017, 9, 13, 11, 13, 3, 57000)
 DATE_TWO = datetime(2017, 9, 13, 11, 13, 5, 57000)
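The fixtures switch to run_id strings because Airflow 2.x identifies a dag run by (dag_id, run_id) rather than by execution_date. A manually triggered run conventionally embeds the logical date in its run_id:

from datetime import datetime, timezone

logical_date = datetime(2017, 9, 13, 11, 13, 3, tzinfo=timezone.utc)
run_id = 'manual__{}'.format(logical_date.isoformat())
print(run_id)  # manual__2017-09-13T11:13:03+00:00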
@@ -100,7 +101,7 @@ DAG_RUN_1 = {
     'dag_id': 'did2',
     'execution_date': DATE_ONE,
     'state': 'FAILED',
-    'run_id': '99',
+    'run_id': RUN_ID_TWO,
     'external_trigger': 'something',
     'start_date': DATE_ONE,
     'end_date': DATE_ONE
@@ -133,9 +134,8 @@ TASK_LIST = [
     {
         'task_id': '1a',
         'dag_id': 'did2',
-        'execution_date': DATE_ONE,
         'state': 'SUCCESS',
-        'run_id': '12345',
+        'run_id': RUN_ID_ONE,
         'external_trigger': 'something',
         'start_date': DATE_ONE,
         'end_date': DATE_ONE,
@@ -147,9 +147,8 @@ TASK_LIST = [
     {
         'task_id': '1b',
         'dag_id': 'did2',
-        'execution_date': DATE_ONE,
         'state': 'SUCCESS',
-        'run_id': '12345',
+        'run_id': RUN_ID_ONE,
         'external_trigger': 'something',
         'start_date': DATE_TWO,
         'end_date': DATE_TWO,
@@ -161,9 +160,8 @@ TASK_LIST = [
     {
         'task_id': '1c',
         'dag_id': 'did2',
-        'execution_date': DATE_ONE,
         'state': 'FAILED',
-        'run_id': '12345',
+        'run_id': RUN_ID_ONE,
         'external_trigger': 'something',
         'start_date': DATE_TWO,
         'end_date': DATE_TWO,
@@ -198,7 +196,7 @@ DAG_RUN_SUB = {
     'dag_id': 'did2.didnt',
     'execution_date': DATE_ONE,
     'state': 'FAILED',
-    'run_id': '99',
+    'run_id': RUN_ID_TWO,
     'external_trigger': 'something',
     'start_date': DATE_ONE,
     'end_date': DATE_ONE
@@ -220,4 +218,4 @@ def test_get_workflow_subords():
     assert dag_detail['dag_id'] == 'did2'
     assert len(dag_detail['sub_dags']) == 1
     assert dag_detail['sub_dags'][0]['dag_id'] == 'did2.didnt'
     assert len(dag_detail['steps']) == 3
@@ -39,6 +39,8 @@ from shipyard_airflow.control.helpers.configdocs_helper import (
 from shipyard_airflow.errors import ApiError
 from shipyard_airflow.policy import ShipyardPolicy

+RUN_ID_ONE = "AAAAAAAAAAAAAAAAAAAAA"
+RUN_ID_TWO = "BBBBBBBBBBBBBBBBBBBBB"
 DATE_ONE = datetime(2017, 9, 13, 11, 13, 3, 57000)
 DATE_TWO = datetime(2017, 9, 13, 11, 13, 5, 57000)
 DATE_ONE_STR = DATE_ONE.strftime('%Y-%m-%dT%H:%M:%S')
@@ -107,7 +109,7 @@ def actions_db():
    """
    return [
        {
-            'id': 'aaaaaa',
+            'id': RUN_ID_ONE,
             'name': 'dag_it',
             'parameters': None,
             'dag_id': 'did1',
@@ -117,7 +119,7 @@ def actions_db():
             'context_marker': '8-4-4-4-12a'
         },
         {
-            'id': 'bbbbbb',
+            'id': RUN_ID_TWO,
             'name': 'dag2',
             'parameters': {
                 'p1': 'p1val'
@@ -140,7 +142,7 @@ def dag_runs_db():
         'dag_id': 'did2',
         'execution_date': DATE_ONE,
         'state': 'SUCCESS',
-        'run_id': '12345',
+        'run_id': RUN_ID_TWO,
         'external_trigger': 'something',
         'start_date': DATE_ONE,
         'end_date': DATE_TWO
@@ -149,7 +151,7 @@ def dag_runs_db():
         'dag_id': 'did1',
         'execution_date': DATE_ONE,
         'state': 'FAILED',
-        'run_id': '99',
+        'run_id': RUN_ID_ONE,
         'external_trigger': 'something',
         'start_date': DATE_ONE,
         'end_date': DATE_ONE
@@ -165,9 +167,9 @@ def tasks_db():
     {
         'task_id': '1a',
         'dag_id': 'did2',
-        'execution_date': DATE_ONE,
         'state': 'SUCCESS',
-        'run_id': '12345',
+        'execution_date': DATE_ONE,
+        'run_id': RUN_ID_TWO,
         'external_trigger': 'something',
         'start_date': DATE_ONE,
         'end_date': DATE_TWO,
@@ -179,9 +181,9 @@ def tasks_db():
     {
         'task_id': '1b',
         'dag_id': 'did2',
-        'execution_date': DATE_ONE,
         'state': 'SUCCESS',
-        'run_id': '12345',
+        'execution_date': DATE_ONE,
+        'run_id': RUN_ID_TWO,
         'external_trigger': 'something',
         'start_date': DATE_ONE,
         'end_date': DATE_TWO,
@@ -193,9 +195,9 @@ def tasks_db():
     {
         'task_id': '1c',
         'dag_id': 'did2',
-        'execution_date': DATE_ONE,
         'state': 'SUCCESS',
-        'run_id': '12345',
+        'execution_date': DATE_ONE,
+        'run_id': RUN_ID_TWO,
         'external_trigger': 'something',
         'start_date': DATE_ONE,
         'end_date': DATE_TWO,
@@ -207,8 +209,9 @@ def tasks_db():
     {
         'task_id': '2a',
         'dag_id': 'did1',
-        'execution_date': DATE_ONE,
         'state': 'FAILED',
+        'execution_date': DATE_ONE,
+        'run_id': RUN_ID_ONE,
         'start_date': DATE_ONE,
         'end_date': DATE_ONE,
         'duration': '1second',
@@ -354,16 +357,16 @@ def test_get_all_actions_notes(*args):
     action_resource.get_all_dag_runs_db = dag_runs_db
     action_resource.get_all_tasks_db = tasks_db
     # inject some notes
-    nh.make_action_note('aaaaaa', "hello from aaaaaa1")
-    nh.make_action_note('aaaaaa', "hello from aaaaaa2")
-    nh.make_action_note('bbbbbb', "hello from bbbbbb")
+    nh.make_action_note(RUN_ID_ONE, "hello from aaaaaa1")
+    nh.make_action_note(RUN_ID_ONE, "hello from aaaaaa2")
+    nh.make_action_note(RUN_ID_TWO, "hello from bbbbbb")

     result = action_resource.get_all_actions(verbosity=1)
     assert len(result) == len(actions_db())
     for action in result:
-        if action['id'] == 'aaaaaa':
+        if action['id'] == RUN_ID_ONE:
             assert len(action['notes']) == 2
-        if action['id'] == 'bbbbbb':
+        if action['id'] == RUN_ID_TWO:
             assert len(action['notes']) == 1
             assert action['notes'][0]['note_val'] == 'hello from bbbbbb'
@@ -28,6 +28,8 @@ from shipyard_airflow.db.db import AIRFLOW_DB, SHIPYARD_DB
 from shipyard_airflow.errors import ApiError
 from tests.unit.control.common import create_req, create_resp

+RUN_ID_ONE = "AAAAAAAAAAAAAAAAAAAAA"
+RUN_ID_TWO = "BBBBBBBBBBBBBBBBBBBBB"
 DATE_ONE = datetime(2017, 9, 13, 11, 13, 3, 57000)
 DATE_TWO = datetime(2017, 9, 13, 11, 13, 5, 57000)
 DATE_ONE_STR = DATE_ONE.strftime('%Y-%m-%dT%H:%M:%S')
@@ -50,10 +52,10 @@ def actions_db(action_id):
    replaces the actual db call
    """
    return {
-        'id': '12345678901234567890123456',
+        'id': RUN_ID_ONE,
         'name': 'dag_it',
         'parameters': None,
-        'dag_id': 'did2',
+        'dag_id': 'did1',
         'dag_execution_date': DATE_ONE_STR,
         'user': 'robot1',
         'timestamp': DATE_ONE,
@@ -69,7 +71,7 @@ def dag_runs_db(dag_id, execution_date):
         'dag_id': 'did2',
         'execution_date': DATE_ONE,
         'state': 'FAILED',
-        'run_id': '99',
+        'run_id': RUN_ID_TWO,
         'external_trigger': 'something',
         'start_date': DATE_ONE,
         'end_date': DATE_ONE
@@ -83,9 +85,8 @@ def tasks_db(dag_id, execution_date):
     return [{
         'task_id': '1a',
         'dag_id': 'did2',
-        'execution_date': DATE_ONE,
         'state': 'SUCCESS',
-        'run_id': '12345',
+        'run_id': RUN_ID_TWO,
         'external_trigger': 'something',
         'start_date': DATE_ONE,
         'end_date': DATE_ONE,
@@ -96,9 +97,8 @@ def tasks_db(dag_id, execution_date):
     }, {
         'task_id': '1b',
         'dag_id': 'did2',
-        'execution_date': DATE_ONE,
         'state': 'SUCCESS',
-        'run_id': '12345',
+        'run_id': RUN_ID_TWO,
         'external_trigger': 'something',
         'start_date': DATE_TWO,
         'end_date': DATE_TWO,
@@ -109,9 +109,8 @@ def tasks_db(dag_id, execution_date):
     }, {
         'task_id': '1c',
         'dag_id': 'did2',
-        'execution_date': DATE_ONE,
         'state': 'FAILED',
-        'run_id': '12345',
+        'run_id': RUN_ID_TWO,
         'external_trigger': 'something',
         'start_date': DATE_TWO,
         'end_date': DATE_TWO,
@@ -230,7 +229,7 @@ def test_get_dag_run_by_id_notempty():
         'dag_id': 'did2',
         'execution_date': DATE_ONE,
         'state': 'FAILED',
-        'run_id': '99',
+        'run_id': RUN_ID_TWO,
         'external_trigger': 'something',
         'start_date': DATE_ONE,
         'end_date': DATE_ONE
@@ -296,14 +295,13 @@ def test_get_tasks_db(mock_get_tasks_by_id):
         dag_id=dag_id, execution_date=execution_date)
     assert result == expected

-
 @mock.patch.object(AIRFLOW_DB, 'get_dag_runs_by_id')
 def test_get_dag_run_db(mock_get_dag_runs_by_id):
     expected = {
         'dag_id': 'did2',
         'execution_date': DATE_ONE,
         'state': 'FAILED',
-        'run_id': '99',
+        'run_id': RUN_ID_TWO,
         'external_trigger': 'something',
         'start_date': DATE_ONE,
         'end_date': DATE_ONE
@@ -318,7 +316,6 @@ def test_get_dag_run_db(mock_get_dag_runs_by_id):
         dag_id=dag_id, execution_date=execution_date)
     assert result == expected

-
 @mock.patch.object(SHIPYARD_DB, 'get_command_audit_by_action_id')
 def test_get_action_command_audit_db(mock_get_command_audit_by_action_id):
     expected = {
@@ -26,6 +26,8 @@ from shipyard_airflow.control.action.actions_steps_id_api import \
 from shipyard_airflow.errors import ApiError
 from tests.unit.control import common

+RUN_ID_ONE = "AAAAAAAAAAAAAAAAAAAAA"
+RUN_ID_TWO = "BBBBBBBBBBBBBBBBBBBBB"
 DATE_ONE = datetime(2017, 9, 13, 11, 13, 3, 57000)
 DATE_TWO = datetime(2017, 9, 13, 11, 13, 5, 57000)
 DATE_ONE_STR = DATE_ONE.strftime('%Y-%m-%dT%H:%M:%S')
@@ -64,9 +66,9 @@ def tasks_db(dag_id, execution_date):
     return [{
         'task_id': '1a',
         'dag_id': 'did2',
-        'execution_date': DATE_ONE,
+        # 'execution_date': DATE_ONE,
         'state': 'SUCCESS',
-        'run_id': '12345',
+        'run_id': RUN_ID_ONE,
         'external_trigger': 'something',
         'start_date': DATE_ONE,
         'end_date': DATE_ONE,
@@ -77,9 +79,9 @@ def tasks_db(dag_id, execution_date):
     }, {
         'task_id': '1b',
         'dag_id': 'did2',
-        'execution_date': DATE_ONE,
+        # 'execution_date': DATE_ONE,
         'state': 'SUCCESS',
-        'run_id': '12345',
+        'run_id': RUN_ID_ONE,
         'external_trigger': 'something',
         'start_date': DATE_TWO,
         'end_date': DATE_TWO,
@@ -90,9 +92,9 @@ def tasks_db(dag_id, execution_date):
     }, {
         'task_id': '1c',
         'dag_id': 'did2',
-        'execution_date': DATE_ONE,
+        # 'execution_date': DATE_ONE,
         'state': 'FAILED',
-        'run_id': '12345',
+        'run_id': RUN_ID_ONE,
         'external_trigger': 'something',
         'start_date': DATE_TWO,
         'end_date': DATE_TWO,
@@ -172,3 +174,4 @@ class TestActionsStepsResource():
         action_resource.get_tasks_db(dag_id, execution_date)
         mock_get_tasks_by_id.assert_called_with(
             dag_id=dag_id, execution_date=execution_date)
+
@@ -57,7 +57,7 @@ TASK_INSTANCE_DB = [
     {
         'task_id': 'action_xcom',
         'dag_id': 'deploy_site',
-        'execution_date': DATE_ONE,
+        # 'execution_date': DATE_ONE,
         'start_date': DATE_TWO,
         'end_date': DATE_THREE,
         'duration': '4.353791',
@@ -75,7 +75,7 @@ TASK_INSTANCE_DB = [
     }, {
         'task_id': 'dag_concurrency_check',
         'dag_id': 'deploy_site',
-        'execution_date': DATE_ONE,
+        # 'execution_date': DATE_ONE,
         'start_date': DATE_TWO,
         'end_date': DATE_THREE,
         'duration': '4.034112',
@@ -93,7 +93,7 @@ TASK_INSTANCE_DB = [
     }, {
         'task_id': 'k8s_preflight_check',
         'dag_id': 'deploy_site',
-        'execution_date': DATE_ONE,
+        # 'execution_date': DATE_ONE,
         'start_date': DATE_TWO,
         'end_date': DATE_THREE,
         'duration': '4.452571',
@@ -39,7 +39,7 @@ RELEASES = {
 }


-class TestArmadaTestReleasesOperator:
+class TestArmadaTestReleasesOperator(ArmadaTestReleasesOperator):
     @mock.patch('shipyard_airflow.plugins.armada_test_releases.LOG.info')
     @mock.patch.object(ArmadaBaseOperator, 'armada_client', create=True)
     @mock.patch.object(ArmadaBaseOperator, 'get_releases',
@@ -143,6 +143,7 @@ class TestDrydockDestroyNodesOperator:
         assert ct.called
         assert "Task destroy_nodes has failed." in caplog.text

+
     @mock.patch.object(
         DrydockDestroyNodeOperator, 'create_task'
     )
@@ -192,8 +193,8 @@ class TestDrydockDestroyNodesOperator:
         assert gs.called
         assert "Task destroy_nodes has timed out after 10 seconds." in (
             caplog.text)
         assert ("One or more nodes requested for destruction failed to "
                 "destroy") in str(ae.value)

     @mock.patch.object(
         DrydockDestroyNodeOperator, 'get_successes_for_task',
@@ -351,8 +351,9 @@ class TestDrydockNodesOperator:
         assert op._execute_task.call_count == 1

     @mock.patch("shipyard_airflow.plugins.check_k8s_node_status."
-                "check_node_status", return_value=[])
-    def test_execute_deployment(self, cns):
+                "check_node_status", return_value=[]
+                )
+    def test_execute_deployment(self,cns):
         op = DrydockNodesOperator(main_dag_name="main",
                                   shipyard_conf=CONF_FILE,
                                   task_id="t1")
@@ -369,8 +370,11 @@ class TestDrydockNodesOperator:
         assert op._execute_task.call_count == 1
         assert cns.call_count == 1

+
     @mock.patch("shipyard_airflow.plugins.check_k8s_node_status."
-                "check_node_status", return_value=['node2', 'node4'])
+                "check_node_status",
+                return_value=['node2', 'node4']
+                )
     def test_execute_deployment_k8s_fail(self, cns, caplog):
         op = DrydockNodesOperator(main_dag_name="main",
                                   shipyard_conf=CONF_FILE,
@@ -187,8 +187,8 @@ class TestDrydockRelabelNodesOperator:
         assert gs.called
         assert "Task relabel_nodes has timed out after 2 seconds." in (
             caplog.text)
         assert ("One or more nodes requested for relabeling failed to "
                 "relabel") in str(ae.value)

     @mock.patch.object(
         DrydockRelabelNodesOperator, 'get_successes_for_task',
@@ -10,19 +10,6 @@ setenv=
 deps =
     -r{toxinidir}/requirements-frozen.txt
     -r{toxinidir}/test-requirements.txt

-
-[testenv:freeze-bionic]
-recreate = True
-allowlist_externals=
-    rm
-    sh
-deps=
-    -r{toxinidir}/requirements-direct.txt
-commands=
-    rm -f {toxinidir}/requirements-frozen-bionic.txt
-    sh -c "pip freeze --all | grep -vE 'shipyard_airflow|pyinotify|pkg-resources' > requirements-frozen-bionic.txt"
-
 [testenv:freeze]
 recreate = True
 allowlist_externals=
@@ -30,42 +17,54 @@ allowlist_externals=
     sh
 deps=
     -r{toxinidir}/requirements-direct.txt
+    -c https://raw.githubusercontent.com/apache/airflow/constraints-2.6.2/constraints-3.8.txt
 commands=
     rm -f {toxinidir}/requirements-frozen.txt
     sh -c "pip freeze --all | grep -vE 'shipyard_airflow|pyinotify|pkg-resources' > requirements-frozen.txt"


 [testenv:py38]
 skipsdist=True
 setenv=
     SLUGIFY_USES_TEXT_UNIDECODE=yes
+    PYTHONWARNINGS=ignore::DeprecationWarning,ignore::FutureWarning
 basepython=python3.8
 allowlist_externals=
     bash
     airflow
 commands =
-    bash -c "rm -f $HOME/airflow/airflow.db"
-    pip install -r{toxinidir}/test-requirements.txt
-    airflow initdb
-    airflow run example_bash_operator runme_0 2018-01-01
-    airflow backfill example_bash_operator -s 2018-01-01 -e 2018-01-02
-    airflow dag_state example_bash_operator 2018-01-01
+    bash -c "rm -rf $HOME/airflow"
+    airflow version
+    airflow db init
+    airflow db upgrade
+    airflow info
+    airflow dags list
+    airflow dags list-import-errors
+    airflow tasks test example_bash_operator runme_0
+    airflow dags backfill example_bash_operator -s 2018-01-01 -e 2018-01-02
+    airflow tasks run example_bash_operator runme_0 2018-01-01
+    airflow dags state example_bash_operator 2018-01-01
     pytest {posargs} -vv

 [testenv:cover]
 skipsdist=True
 setenv=
     SLUGIFY_USES_TEXT_UNIDECODE=yes
+    PYTHONWARNINGS=ignore::DeprecationWarning,ignore::FutureWarning
 allowlist_externals=
     bash
     airflow
 commands =
-    bash -c "rm -f $HOME/airflow/airflow.db"
-    pip install -r{toxinidir}/test-requirements.txt
-    airflow initdb
-    airflow run example_bash_operator runme_0 2018-01-01
-    airflow backfill example_bash_operator -s 2018-01-01 -e 2018-01-02
-    airflow dag_state example_bash_operator 2018-01-01
+    bash -c "rm -rf $HOME/airflow"
+    airflow version
+    airflow db init
+    airflow db upgrade
+    airflow info
+    airflow dags list
+    airflow dags list-import-errors
+    airflow tasks test example_bash_operator runme_0
+    airflow dags backfill example_bash_operator -s 2018-01-01 -e 2018-01-02
+    airflow tasks run example_bash_operator runme_0 2018-01-01
+    airflow dags state example_bash_operator 2018-01-01
     pytest \
         {posargs} \
         --cov-branch \
@@ -77,30 +76,7 @@ commands =
         --cov-report term \
         -vv

-[testenv:py36]
-skipsdist=True
-basepython=python3.6
-deps =
-    -r{toxinidir}/requirements-frozen-bionic.txt
-setenv=
-    SLUGIFY_USES_TEXT_UNIDECODE=yes
-allowlist_externals=
-    bash
-    airflow
-commands =
-    bash -c "rm -f $HOME/airflow/airflow.db"
-    pip install -r{toxinidir}/test-requirements.txt
-    airflow initdb
-    airflow run example_bash_operator runme_0 2018-01-01
-    airflow backfill example_bash_operator -s 2018-01-01 -e 2018-01-02
-    airflow dag_state example_bash_operator 2018-01-01
-    pytest {posargs} -vv
-
-
 [testenv:pep8]
-deps=
-    flake8>=3.3.0
-    bandit>=1.5.0
 commands =
     flake8 {toxinidir}/shipyard_airflow
     bandit -r shipyard_airflow
@@ -113,9 +89,6 @@ commands =
 [testenv:genconfig]
 setenv=
     SLUGIFY_USES_TEXT_UNIDECODE=yes
-deps =
-    -r{toxinidir}/test-requirements.txt
-    -r{toxinidir}/requirements-frozen.txt
 commands =
     pip install . --use-pep517
     oslo-config-generator --config-file=generator/config-generator.conf
@@ -123,9 +96,6 @@ commands =
 [testenv:genpolicy]
 setenv=
     SLUGIFY_USES_TEXT_UNIDECODE=yes
-deps =
-    -r{toxinidir}/test-requirements.txt
-    -r{toxinidir}/requirements-frozen.txt
 commands =
     pip install . --use-pep517
     oslopolicy-sample-generator --config-file=generator/policy-generator.conf
@@ -135,9 +105,6 @@ commands =
 skipsdist=True
 setenv=
     SLUGIFY_USES_TEXT_UNIDECODE=yes
-deps =
-    -r{toxinidir}/test-requirements.txt
-    -r{toxinidir}/requirements-frozen.txt
 commands =
     pip install . --use-pep517
     oslo-config-generator --config-file=generator/config-generator.conf
@@ -14,20 +14,18 @@

 # CLI/Client requirements

-arrow<=0.17.0
+arrow
-click==7.1.2
+click
-click-default-group==1.2
+click-default-group
-keystoneauth1<=5.1.1
+keystoneauth1==4.3.1
-requests==2.23.0
+requests
-PTable==0.9.2
+PTable
-pylibyaml==0.1.0
+pylibyaml
-PyYAML<=5.4.1
+PyYAML
-Flask==1.1.0
+Flask
-coverage==5.3
+itsdangerous
-itsdangerous==2.0.1
+Jinja2
-Jinja2==2.10.3
+MarkupSafe
-MarkupSafe==2.0.1
+pbr
-pbr==5.5.1
+python-dateutil
-python-dateutil==2.8.1
+Werkzeug
-setuptools<=45.2.0
-Werkzeug==0.16.1
@@ -1,30 +0,0 @@
-arrow==0.17.0
-certifi==2023.5.7
-chardet==3.0.4
-click==7.1.2
-click-default-group==1.2
-coverage==5.3
-Flask==1.1.0
-idna==2.10
-importlib-metadata==4.8.3
-iso8601==1.1.0
-itsdangerous==2.0.1
-Jinja2==2.10.3
-keystoneauth1==5.1.1
-MarkupSafe==2.0.1
-os-service-types==1.7.0
-pbr==5.5.1
-pip==21.3.1
-PTable==0.9.2
-pylibyaml==0.1.0
-python-dateutil==2.8.1
-PyYAML==5.4.1
-requests==2.23.0
-setuptools==45.2.0
-six==1.16.0
-stevedore==3.5.2
-typing_extensions==4.1.1
-urllib3==1.25.11
-Werkzeug==0.16.1
-wheel==0.37.1
-zipp==3.6.0
@@ -1,27 +1,28 @@
-arrow==0.17.0
+arrow==1.2.3
 certifi==2023.5.7
-chardet==3.0.4
+charset-normalizer==3.1.0
-click==7.1.2
+click==8.1.3
-click-default-group==1.2
+click-default-group==1.2.2
-coverage==5.3
-Flask==1.1.0
+Flask==2.2.5
-idna==2.10
+idna==3.4
+importlib-metadata==4.13.0
-iso8601==1.1.0
+iso8601==2.0.0
-itsdangerous==2.0.1
+itsdangerous==2.1.2
-Jinja2==2.10.3
+Jinja2==3.1.2
-keystoneauth1==5.1.1
+keystoneauth1==4.3.1
-MarkupSafe==2.0.1
+MarkupSafe==2.1.3
 os-service-types==1.7.0
-pbr==5.5.1
+pbr==5.11.1
-pip==23.0.1
+pip==23.1.2
 PTable==0.9.2
 pylibyaml==0.1.0
-python-dateutil==2.8.1
+python-dateutil==2.8.2
-PyYAML==5.4.1
+PyYAML==6.0
-requests==2.23.0
+requests==2.31.0
-setuptools==45.2.0
+setuptools==67.7.2
 six==1.16.0
-stevedore==5.0.0
+stevedore==5.1.0
-urllib3==1.25.11
+urllib3==1.26.16
-Werkzeug==0.16.1
+Werkzeug==2.2.3
 wheel==0.40.0
+zipp==3.15.0
@@ -1,13 +1,13 @@
 # Testing
-pyflakes==2.2.0
-amqp==2.6.1
-pytest==3.5.0
-pytest-cov==2.5.1
-responses==0.10.2
-testfixtures==5.1.1
+pyflakes
+pytest==7.4.0
+pytest-cov==4.1.0
+coverage==5.3
+responses
+testfixtures

 # Linting
-flake8==3.8.4
+flake8

 # Security scanning
 bandit==1.6.0  # Apache-2.0
|
@ -169,7 +169,6 @@ GET_STEP_API_RESP = """
|
|||||||
"try_number": 1,
|
"try_number": 1,
|
||||||
"task_id": "preflight",
|
"task_id": "preflight",
|
||||||
"state": "success",
|
"state": "success",
|
||||||
"execution_date": "2017-09-24 19:05:49",
|
|
||||||
"dag_id": "deploy_site",
|
"dag_id": "deploy_site",
|
||||||
"index": 1,
|
"index": 1,
|
||||||
"start_date": "2017-09-24 19:05:59.281032",
|
"start_date": "2017-09-24 19:05:59.281032",
|
||||||
@ -311,8 +310,7 @@ WF_API_RESP = """
|
|||||||
"try_number": 1,
|
"try_number": 1,
|
||||||
"state": "success",
|
"state": "success",
|
||||||
"operator": "PythonOperator",
|
"operator": "PythonOperator",
|
||||||
"dag_id": "deploy_site",
|
"dag_id": "deploy_site"
|
||||||
"execution_date": "2017-10-09 21:19:03"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"end_date": "2017-10-09 21:19:25.283785",
|
"end_date": "2017-10-09 21:19:25.283785",
|
||||||
@ -323,8 +321,7 @@ WF_API_RESP = """
|
|||||||
"try_number": 1,
|
"try_number": 1,
|
||||||
"state": "success",
|
"state": "success",
|
||||||
"operator": "ConcurrencyCheckOperator",
|
"operator": "ConcurrencyCheckOperator",
|
||||||
"dag_id": "deploy_site",
|
"dag_id": "deploy_site"
|
||||||
"execution_date": "2017-10-09 21:19:03"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"end_date": "2017-10-09 21:20:05.394677",
|
"end_date": "2017-10-09 21:20:05.394677",
|
||||||
@ -335,8 +332,7 @@ WF_API_RESP = """
|
|||||||
"try_number": 1,
|
"try_number": 1,
|
||||||
"state": "failed",
|
"state": "failed",
|
||||||
"operator": "SubDagOperator",
|
"operator": "SubDagOperator",
|
||||||
"dag_id": "deploy_site",
|
"dag_id": "deploy_site"
|
||||||
"execution_date": "2017-10-09 21:19:03"
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"dag_id": "deploy_site",
|
"dag_id": "deploy_site",
|
||||||
@ -344,7 +340,6 @@ WF_API_RESP = """
|
|||||||
"run_id": "manual__2017-10-09T21:19:03",
|
"run_id": "manual__2017-10-09T21:19:03",
|
||||||
"sub_dags": [
|
"sub_dags": [
|
||||||
{
|
{
|
||||||
"execution_date": "2017-10-09 21:19:03",
|
|
||||||
"end_date": null,
|
"end_date": null,
|
||||||
"workflow_id": "deploy_site.preflight__2017-10-09T21:19:03.000000",
|
"workflow_id": "deploy_site.preflight__2017-10-09T21:19:03.000000",
|
||||||
"start_date": "2017-10-09 21:19:35.082479",
|
"start_date": "2017-10-09 21:19:35.082479",
|
||||||
@ -354,7 +349,6 @@ WF_API_RESP = """
|
|||||||
"run_id": "backfill_2017-10-09T21:19:03"
|
"run_id": "backfill_2017-10-09T21:19:03"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"execution_date": "2017-10-09 21:19:03",
|
|
||||||
"end_date": null,
|
"end_date": null,
|
||||||
"workflow_id": "deploy_site.postflight__2017-10-09T21:19:03.000000",
|
"workflow_id": "deploy_site.postflight__2017-10-09T21:19:03.000000",
|
||||||
"start_date": "2017-10-09 21:19:35.082479",
|
"start_date": "2017-10-09 21:19:35.082479",
|
||||||
|
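Dropping the execution_date members forces the comma fix on the preceding dag_id lines: JSON, unlike Python literals, rejects a trailing comma after the last member, so the fixture strings would otherwise stop parsing:

import json

json.loads('{"dag_id": "deploy_site"}')  # valid

try:
    json.loads('{"dag_id": "deploy_site",}')  # trailing comma
except json.JSONDecodeError as err:
    print(err)  # Expecting property name enclosed in double quotes: ...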
@@ -11,18 +11,6 @@ deps =
     -r{toxinidir}/requirements-frozen.txt
     -r{toxinidir}/test-requirements.txt

-[testenv:freeze-bionic]
-recreate = True
-allowlist_externals=
-    rm
-    sh
-deps=
-    -r{toxinidir}/requirements-direct.txt
-commands=
-    rm -f {toxinidir}/requirements-frozen-bionic.txt
-    sh -c "pip freeze --all | grep -vE 'shipyard_airflow|pyinotify|pkg-resources' > requirements-frozen-bionic.txt"
-
-
 [testenv:freeze]
 recreate = True
 allowlist_externals=
@@ -30,20 +18,11 @@ allowlist_externals=
     sh
 deps=
     -r{toxinidir}/requirements-direct.txt
+    -c https://raw.githubusercontent.com/apache/airflow/constraints-2.6.2/constraints-3.8.txt
 commands=
     rm -f {toxinidir}/requirements-frozen.txt
     sh -c "pip freeze --all | grep -vE 'shipyard_airflow|pyinotify|pkg-resources' > requirements-frozen.txt"

-[testenv:py36]
-skipsdist=True
-deps =
-    -r{toxinidir}/requirements-frozen-bionic.txt
-    -r{toxinidir}/test-requirements.txt
-commands =
-    pytest \
-        {posargs} \
-        -vv
-
 [testenv:py38]
 skipsdist=True
 commands =
@@ -67,9 +46,6 @@ commands =


 [testenv:pep8]
-deps=
-    flake8>=3.3.0
-    bandit>=1.5.0
 commands =
     flake8 {toxinidir}/shipyard_client
     bandit -r shipyard_airflow
@@ -23,10 +23,11 @@

 tasks:

-  - name: Install Packaging python module for airship
+  - name: Install Packaging python module for tools/airship
     block:
       - pip:
           name: packaging
+          version: 23.1
           executable: pip3
         become: True

@@ -61,7 +62,7 @@
     args:
       chdir: "{{ zuul.projects['opendev.org/airship/treasuremap'].src_dir }}"

-  - name: Setup openstack client
+  - name: Setup OpenStack Client
    shell: |
      set -ex
      ./tools/deployment/airskiff/developer/020-setup-client.sh
@@ -69,42 +70,35 @@
      chdir: "{{ zuul.projects['opendev.org/airship/treasuremap'].src_dir }}"
    become: yes

+  - name: Build all charts locally
+    shell: |
+      set -ex
+      ./tools/deployment/airskiff/developer/015-make-all-charts.sh
+    args:
+      chdir: "{{ zuul.projects['opendev.org/airship/treasuremap'].src_dir }}"
+    become: yes
+
   - name: Build Shipyard and Airflow with submitted changes
     shell: |
       set -ex
       export DISTRO={{ SHIPYARD_IMAGE_DISTRO }}
       make images
-      docker system prune --force
     args:
       chdir: "{{ zuul.project.src_dir }}"
     become: yes

   - name: Use locally built images in manifests
     shell: |
       set -ex
-      export DISTRO={{ SHIPYARD_IMAGE_DISTRO }}
       docker rm registry --force || true
       docker run -d -p 5000:5000 --restart=always --name registry registry:2
-      if test "${DISTRO}" = 'ubuntu_bionic'
-      then
-        docker tag quay.io/airshipit/shipyard:latest-ubuntu_bionic localhost:5000/shipyard:latest-ubuntu_bionic
-        docker push localhost:5000/shipyard:latest-ubuntu_bionic
-        sed -i "s#quay.io/airshipit/shipyard:latest-ubuntu_focal#localhost:5000/shipyard:latest-ubuntu_bionic#g" ./site/airskiff/software/config/versions.yaml
-        sed -i "s#quay.io/airshipit/shipyard:latest-ubuntu_focal#localhost:5000/shipyard:latest-ubuntu_bionic#g" ./global/software/config/versions.yaml
-      else
-        docker tag quay.io/airshipit/shipyard:latest-ubuntu_focal localhost:5000/shipyard:latest-ubuntu_focal
-        docker push localhost:5000/shipyard:latest-ubuntu_focal
-        sed -i "s#quay.io/airshipit/shipyard:latest-ubuntu_focal#localhost:5000/shipyard:latest-ubuntu_focal#g" ./site/airskiff/software/config/versions.yaml
-        sed -i "s#quay.io/airshipit/shipyard:latest-ubuntu_focal#localhost:5000/shipyard:latest-ubuntu_focal#g" ./global/software/config/versions.yaml
-      fi
+      docker tag quay.io/airshipit/airflow:latest-ubuntu_focal localhost:5000/airflow:latest-ubuntu_focal
+      docker tag quay.io/airshipit/shipyard:latest-ubuntu_focal localhost:5000/shipyard:latest-ubuntu_focal
+      docker push localhost:5000/airflow:latest-ubuntu_focal
+      docker push localhost:5000/shipyard:latest-ubuntu_focal
+      sed -i "s#quay.io/airshipit/airflow:latest-ubuntu_focal#localhost:5000/airflow:latest-ubuntu_focal#g" ./site/airskiff/software/config/versions.yaml
+      sed -i "s#quay.io/airshipit/shipyard:latest-ubuntu_focal#localhost:5000/shipyard:latest-ubuntu_focal#g" ./site/airskiff/software/config/versions.yaml
-
-  - name: Build all charts locally
-    shell: |
-      set -ex
-      ./tools/deployment/airskiff/developer/015-make-all-charts.sh
     args:
       chdir: "{{ zuul.projects['opendev.org/airship/treasuremap'].src_dir }}"
     become: yes
@@ -121,8 +115,10 @@
      chdir: "{{ zuul.projects['opendev.org/airship/treasuremap'].src_dir }}"
    become: yes


   - name: Deploy Airship components using Armada
     shell: |
+      set -ex
       mkdir -p ~/.kube
       cp -rp /home/zuul/.kube/config ~/.kube/config
       ./tools/deployment/airskiff/developer/030-armada-bootstrap.sh
@@ -143,3 +139,12 @@
     args:
       chdir: "{{ zuul.projects['opendev.org/airship/treasuremap'].src_dir }}"
     become: yes
+
+  - name: Stop artifactory
+    shell: |
+      set -ex
+      # terminate artifactory
+      docker rm artifacts --force || true
+    args:
+      chdir: "{{ zuul.project.src_dir }}"
+    become: yes
|
@ -17,7 +17,7 @@
|
|||||||
set -x
|
set -x
|
||||||
|
|
||||||
HELM=$1
|
HELM=$1
|
||||||
HELM_ARTIFACT_URL=${HELM_ARTIFACT_URL:-"https://get.helm.sh/helm-v3.9.4-linux-amd64.tar.gz"}
|
HELM_ARTIFACT_URL=${HELM_ARTIFACT_URL:-"https://get.helm.sh/helm-v3.12.2-linux-amd64.tar.gz"}
|
||||||
|
|
||||||
|
|
||||||
function install_helm_binary {
|
function install_helm_binary {
|
||||||
tox.ini
@@ -49,15 +49,6 @@ commands=
     bash -c "cp {toxinidir}/src/bin/shipyard_airflow/etc/shipyard/policy.yaml.sample {toxinidir}/doc/source/_static/shipyard.policy.yaml.sample"
     bash -c "cp {toxinidir}/src/bin/shipyard_airflow/etc/shipyard/shipyard.conf.sample {toxinidir}/doc/source/_static/shipyard.conf.sample"

-[testenv:py36]
-deps =
-    tox<=4.0.0
-allowlist_externals=
-    tox
-commands=
-    tox -c {toxinidir}/src/bin/shipyard_airflow/tox.ini -e py36
-    tox -c {toxinidir}/src/bin/shipyard_client/tox.ini -e py36
-
 [testenv:cover]
 deps =
     tox<=4.0.0