From 0aca9f5372502c2c9336d91f4e20ab8d35bf1c5c Mon Sep 17 00:00:00 2001 From: Wei Zhou Date: Wed, 18 Sep 2019 17:14:41 -0400 Subject: [PATCH] Remove duplicate code in Ansible playbooks This commit removes some duplicate code in Ansible playbooks repo. Tests done: - remote and local bootstrap controller-0 - remote and local backup - remote and local restore_platform Change-Id: Id6894c6ae5fcb527a2619c2838fa6510e6142a30 Story: 2004761 Task: 36571 Signed-off-by: Wei Zhou --- playbookconfig/src/playbooks/backup.yml | 22 +---- playbookconfig/src/playbooks/bootstrap.yml | 5 +- .../host_vars/backup-restore/default.yml | 82 +++++++++++++++++++ .../host_vars/{ => bootstrap}/default.yml | 50 +---------- .../src/playbooks/restore_platform.yml | 14 ++-- .../prepare-env}/tasks/main.yml | 26 ------ .../transfer-file/tasks/main.yml | 47 +++++++++++ .../prepare-env/tasks/load_patching_tasks.yml | 53 ------------ .../bootstrap/prepare-env/tasks/main.yml | 67 --------------- .../roles/bootstrap/prepare-env/vars/main.yml | 1 - .../roles/common/prepare-env/tasks/main.yml | 75 +++++++++++++++++ .../include-override-files/tasks/main.yml | 22 ----- .../tasks/main.yml | 46 ++++++++++- 13 files changed, 263 insertions(+), 247 deletions(-) create mode 100644 playbookconfig/src/playbooks/host_vars/backup-restore/default.yml rename playbookconfig/src/playbooks/host_vars/{ => bootstrap}/default.yml (81%) rename playbookconfig/src/playbooks/roles/{common => backup-restore/prepare-env}/tasks/main.yml (64%) create mode 100644 playbookconfig/src/playbooks/roles/backup-restore/transfer-file/tasks/main.yml create mode 100644 playbookconfig/src/playbooks/roles/common/prepare-env/tasks/main.yml delete mode 100644 playbookconfig/src/playbooks/roles/include-override-files/tasks/main.yml rename playbookconfig/src/playbooks/roles/restore-platform/{pre-restore-bootstrap => prepare-env}/tasks/main.yml (56%) diff --git a/playbookconfig/src/playbooks/backup.yml b/playbookconfig/src/playbooks/backup.yml index f6ac945b6..91f9063de 100644 --- a/playbookconfig/src/playbooks/backup.yml +++ b/playbookconfig/src/playbooks/backup.yml @@ -10,27 +10,11 @@ gather_facts: no vars_files: - - host_vars/default.yml - - pre_tasks: - - stat: - path: "{{ item }}" - register: files_to_import - with_items: - - "{{ override_files_dir }}/secrets.yml" - - "{{ override_files_dir }}/{{ inventory_hostname }}_secrets.yml" - - "{{ override_files_dir }}/site.yml" - - "{{ override_files_dir }}/{{ inventory_hostname }}.yml" - delegate_to: localhost - - - include_vars: "{{ item.item }}" - when: item.stat.exists - with_items: "{{ files_to_import.results }}" - loop_control: - label: "{{ item.item }}" + - host_vars/backup-restore/default.yml # Main play roles: - - common + - { role: common/prepare-env } + - { role: backup-restore/prepare-env } - { role: backup/prepare-env, become: yes } - { role: backup/backup-system, become: yes } diff --git a/playbookconfig/src/playbooks/bootstrap.yml b/playbookconfig/src/playbooks/bootstrap.yml index 4f1fab637..65fc36f9e 100644 --- a/playbookconfig/src/playbooks/bootstrap.yml +++ b/playbookconfig/src/playbooks/bootstrap.yml @@ -10,11 +10,11 @@ gather_facts: no vars_files: - - host_vars/default.yml + - host_vars/bootstrap/default.yml # Main play roles: - - include-override-files + - common/prepare-env - bootstrap/prepare-env - { role: bootstrap/validate-config, when: not skip_play, become: yes } - { role: bootstrap/store-passwd, when: not skip_play and save_password, become: yes } @@ -23,7 +23,6 @@ - { role: 
bootstrap/bringup-essential-services, when: not skip_play, become: yes } vars: - change_password: false skip_play: false replayed: false mode: 'bootstrap' diff --git a/playbookconfig/src/playbooks/host_vars/backup-restore/default.yml b/playbookconfig/src/playbooks/host_vars/backup-restore/default.yml new file mode 100644 index 000000000..5b56bbec3 --- /dev/null +++ b/playbookconfig/src/playbooks/host_vars/backup-restore/default.yml @@ -0,0 +1,82 @@ +--- +# ADMIN CREDENTIALS +# ================= +# +# WARNING: It is strongly recommended to store these settings in Ansible vault +# file named "secret" under override files directory. Configuration parameters +# stored in vault must start with vault_ prefix (i.e. vault_admin_username, +# vault_admin_password). +# +admin_username: admin +admin_password: St8rlingX* + +# INITIAL PASSWORD CHANGE RESPONSE SEQUENCE +# ========================================= +# +# The following two parameters are only relevant when the target host is bootstrapped +# remotely and the user wishes to change the initial sysadmin password as part of the +# bootstrap. +# +# WARNING: It is strongly recommended to store this setting in Ansible vault +# file named "secret" under override files directory. Configuration parameters +# stored in vault must start with vault_ prefix (i.e. vault_password_change_responses) +# +password_change: false + +password_change_responses: + yes/no: 'yes' + sysadmin*: 'sysadmin' + \(current\) UNIX password: 'sysadmin' + (?i)New password: 'St8rlingX*' + (?i)Retype new password: 'St8rlingX*' + +# OVERRIDE FILES DIRECTORY +# ======================== +# +# Default directory where user override file(s) can be found +# +override_files_dir: "{{ lookup('env', 'HOME') }}" + +# BACKUP AND RESTORE +# ================== +# +# Location where the backup tar file is placed to perform the restore. +# This location must be specified at the command line via ansible-playbook -e option. +initial_backup_dir: + +# This variable refers to the tar file that is generated by the backup +# procedure and used in the restore phase. The filename must be specified +# at the command line via ansible-playbook -e option. +backup_filename: + +# Default directory where the backup tar file(s) can be found +# on the active controller +backup_dir: /opt/backups + +# The platform backup tarball will be named in this format: +# _.tgz +# +platform_backup_filename_prefix: "{{ inventory_hostname }}_platform_backup" + +# The stx-openstack application backup tarball will be named in this format: +# _.tgz +# +openstack_backup_filename_prefix: "{{ inventory_hostname }}_openstack_backup" + +# An indication whether it is a full restore or partial restore. 
+# true: a full restore where storage partition(s) is/are wiped during +# platform restore and ceph data needs restored +# false: a partial restore where ceph data remain intact during restore +# +# This variable is used for StarlingX OpenStack application restore only +# +restore_ceph_data: false + +# Default directory where the system backup tarballs fetched from the +# active controller can be found +# +host_backup_dir: "{{ lookup('env', 'HOME') }}" + +# Flag file to indicate if platform restore is in progress +# +restore_in_progress_flag: /etc/platform/.restore_in_progress diff --git a/playbookconfig/src/playbooks/host_vars/default.yml b/playbookconfig/src/playbooks/host_vars/bootstrap/default.yml similarity index 81% rename from playbookconfig/src/playbooks/host_vars/default.yml rename to playbookconfig/src/playbooks/host_vars/bootstrap/default.yml index 12f4b2dc1..1b56a4e0d 100644 --- a/playbookconfig/src/playbooks/host_vars/default.yml +++ b/playbookconfig/src/playbooks/host_vars/bootstrap/default.yml @@ -207,14 +207,16 @@ admin_password: St8rlingX* # INITIAL PASSWORD CHANGE RESPONSE SEQUENCE # ========================================= # -# This parameter is only relevant when the target host is bootstrapped remotely -# and the user wishes to change the initial sysadmin password as part of the +# The following two parameters are only relevant when the target host is bootstrapped +# remotely and the user wishes to change the initial sysadmin password as part of the # bootstrap. # # WARNING: It is strongly recommended to store this setting in Ansible vault # file named "secret" under override files directory. Configuration parameters # stored in vault must start with vault_ prefix (i.e. vault_password_change_responses) # +password_change: false + password_change_responses: yes/no: 'yes' sysadmin*: 'sysadmin' @@ -228,47 +230,3 @@ password_change_responses: # Default directory where user override file(s) can be found # override_files_dir: "{{ lookup('env', 'HOME') }}" - -# BACKUP AND RESTORE -# ================== -# -# Location where the backup tar file is placed to perform the restore. -# This location must be specified at the command line via ansible-playbook -e option. -initial_backup_dir: - -# This variable refers to the tar file that is generated by the backup -# procedure and used in the restore phase. The filename must be specified -# at the command line via ansible-playbook -e option. -backup_filename: - -# Default directory where the backup tar file(s) can be found -# on the active controller -backup_dir: /opt/backups - -# The platform backup tarball will be named in this format: -# _.tgz -# -platform_backup_filename_prefix: "{{ inventory_hostname }}_platform_backup" - -# The stx-openstack application backup tarball will be named in this format: -# _.tgz -# -openstack_backup_filename_prefix: "{{ inventory_hostname }}_openstack_backup" - -# An indication whether it is a full restore or partial restore. 
-# true: a full restore where storage partition(s) is/are wiped during -# platform restore and ceph data needs restored -# false: a partial restore where ceph data remain intact during restore -# -# This variable is used for StarlingX OpenStack application restore only -# -restore_ceph_data: false - -# Default directory where the system backup tarballs fetched from the -# active controller can be found -# -host_backup_dir: "{{ lookup('env', 'HOME') }}" - -# Flag file to indicate if platform restore is in progress -# -restore_in_progress_flag: /etc/platform/.restore_in_progress diff --git a/playbookconfig/src/playbooks/restore_platform.yml b/playbookconfig/src/playbooks/restore_platform.yml index dd8b53651..01b54a5a7 100644 --- a/playbookconfig/src/playbooks/restore_platform.yml +++ b/playbookconfig/src/playbooks/restore_platform.yml @@ -8,14 +8,12 @@ gather_facts: no vars_files: - - host_vars/default.yml + - host_vars/backup-restore/default.yml roles: - - include-override-files - - restore-platform/pre-restore-bootstrap - - vars: - change_password: false + - common/prepare-env + - restore-platform/prepare-env + - backup-restore/transfer-file - name: Run bootstrap playbook with restore mode import_playbook: bootstrap.yml mode='restore' @@ -24,8 +22,8 @@ gather_facts: no vars_files: - - host_vars/default.yml + - host_vars/backup-restore/default.yml roles: - - include-override-files + - common/prepare-env - { role: restore-platform/restore-more-data, become: yes } diff --git a/playbookconfig/src/playbooks/roles/common/tasks/main.yml b/playbookconfig/src/playbooks/roles/backup-restore/prepare-env/tasks/main.yml similarity index 64% rename from playbookconfig/src/playbooks/roles/common/tasks/main.yml rename to playbookconfig/src/playbooks/roles/backup-restore/prepare-env/tasks/main.yml index 28f1eaabe..461a4018a 100644 --- a/playbookconfig/src/playbooks/roles/common/tasks/main.yml +++ b/playbookconfig/src/playbooks/roles/backup-restore/prepare-env/tasks/main.yml @@ -8,32 +8,6 @@ # This role contains common components (tasks, vars, etc.) that # can be shared by all the backup and restore playbooks. -# Check host connectivity -- block: - - name: Set SSH port - set_fact: - ansible_port: "{{ ansible_port | default(22) }}" - - - name: Update SSH known hosts - lineinfile: - path: ~/.ssh/known_hosts - state: absent - regexp: '^{{ ansible_host }}|^\[{{ ansible_host }}\]:{{ ansible_port }}' - delegate_to: localhost - - - name: Check connectivity - local_action: command ping -c 1 {{ ansible_host }} - failed_when: false - register: ping_result - - - name: Fail if host is unreachable - fail: msg='Host {{ ansible_host }} is unreachable!' - with_items: - - "{{ ping_result.stdout_lines|list }}" - when: ping_result.rc != 0 and item is search('100% packet loss') - - when: inventory_hostname != 'localhost' - - name: Check archive dir stat: path: "{{ backup_dir }}" diff --git a/playbookconfig/src/playbooks/roles/backup-restore/transfer-file/tasks/main.yml b/playbookconfig/src/playbooks/roles/backup-restore/transfer-file/tasks/main.yml new file mode 100644 index 000000000..e4d0c08b8 --- /dev/null +++ b/playbookconfig/src/playbooks/roles/backup-restore/transfer-file/tasks/main.yml @@ -0,0 +1,47 @@ +--- +# +# Copyright (c) 2019 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# +# SUB-TASKS DESCRIPTION: +# For remote play transfer the backup tar file to controller-0 + +- block: + # Check if the backup tarball already exists. 
If it is the second run + # after the reboot, no need to transfer the backup tarball again. + - name: Check if {{ backup_filename }} has been uploaded already + stat: + path: "{{ target_backup_dir }}/{{ backup_filename }}" + register: check_backup_tarball + + - block: + # TODO(wzhou): Considering to break backup tarball into multiple small tarfiles + # During restore upload each small tarfile one at a time to restore a subfunction. + + # Because Ansible copy module uses ansible_remote_tmp directory as + # a staging area to transfer file, the default ansible_remote_tmp + # which is set in /tmp (1GB) may be too small for backup tarball, + # we require user to set ansible_remote_tmp to a new directory in + # /home/sysadmin via -e option on the command line. For example: + # -e "ansible_remote_tmp=/home/sysadmin/ansible-restore" + - name: Transfer backup tarball to {{ target_backup_dir }} on controller-0 + copy: + src: "{{ initial_backup_dir }}/{{ backup_filename }}" + dest: "{{ target_backup_dir }}" + owner: root + group: root + mode: 0644 + + # As an alternative to Ansible copy, synchronize module may be + # used to transfer large files. But synchronize is broken in Ansible 2.8 + # https://github.com/ansible/ansible/issues/56629. + # - name: Transfer backup tarball to /scratch on controller-0 + # synchronize: + # src: "{{ initial_backup_dir }}/{{ backup_filename }}" + # dest: "{{ target_backup_dir }}/{{ backup_filename }}" + + when: not check_backup_tarball.stat.exists + when: inventory_hostname != "localhost" + become: yes + become_user: root diff --git a/playbookconfig/src/playbooks/roles/bootstrap/prepare-env/tasks/load_patching_tasks.yml b/playbookconfig/src/playbooks/roles/bootstrap/prepare-env/tasks/load_patching_tasks.yml index 1433696b5..06b04ae7f 100644 --- a/playbookconfig/src/playbooks/roles/bootstrap/prepare-env/tasks/load_patching_tasks.yml +++ b/playbookconfig/src/playbooks/roles/bootstrap/prepare-env/tasks/load_patching_tasks.yml @@ -11,59 +11,6 @@ # - reboot the controller if it is required by the patching # - block: - - name: Create {{ restore_in_progress_flag }} flag file - file: - path: "{{ restore_in_progress_flag }}" - state: touch - - # For remote play the backup tarball will be transferred to /scratch - - block: - # Check if the backup tarball already exists. If it is the second run - # after the reboot, no need to transfer the backup tarball again. - - name: Check if {{ backup_filename }} has been uploaded already - stat: - path: "/scratch/{{ backup_filename }}" - register: check_backup_tarball - - - block: - # TODO(wzhou): Considering to break backup tarball into multiple small tarfiles - # During restore upload each small tarfile one at a time to restore a subfunction. - - # Because Ansible copy module uses ansible_remote_tmp directory as - # a staging area to transfer file, the default ansible_remote_tmp - # which is set in /tmp (1GB) may be too small for backup tarball, - # we require user to set ansible_remote_tmp to a new directory in - # /home/sysadmin via -e option on the command line. For example: - # -e "ansible_remote_tmp=/home/sysadmin/ansible-restore" - - name: Transfer backup tarball to /scratch on controller-0 - copy: - src: "{{ initial_backup_dir }}/{{ backup_filename }}" - dest: /scratch - owner: root - group: root - mode: 0644 - - # As an alternative to Ansible copy, synchronize module may be - # used to transfer large files. But synchronize is broken in Ansible 2.8 - # https://github.com/ansible/ansible/issues/56629. 
- # - name: Transfer backup tarball to /scratch on controller-0 - # synchronize: - # src: "{{ initial_backup_dir }}/{{ backup_filename }}" - # dest: "/scratch/{{ backup_filename }}" - - when: not check_backup_tarball.stat.exists - - - name: Set target_backup_dir to /scratch - set_fact: - target_backup_dir: /scratch - - when: inventory_hostname != "localhost" - - - name: For local play set target_backup_dir to initial_backup_dir - set_fact: - target_backup_dir: "{{ initial_backup_dir }}" - when: inventory_hostname == "localhost" - - name: Set fact for patching staging dir set_fact: patching_staging_dir: /scratch/patching diff --git a/playbookconfig/src/playbooks/roles/bootstrap/prepare-env/tasks/main.yml b/playbookconfig/src/playbooks/roles/bootstrap/prepare-env/tasks/main.yml index d54725265..2a728fe55 100644 --- a/playbookconfig/src/playbooks/roles/bootstrap/prepare-env/tasks/main.yml +++ b/playbookconfig/src/playbooks/roles/bootstrap/prepare-env/tasks/main.yml @@ -9,57 +9,6 @@ # the next step. # -# Check host connectivity, change password if provided -- block: - - name: Set SSH port - set_fact: - ansible_port: "{{ ansible_port | default(22) }}" - - - name: Update SSH known hosts - lineinfile: - path: ~/.ssh/known_hosts - state: absent - regexp: '^{{ ansible_host }}|^\[{{ ansible_host }}\]:{{ ansible_port }}' - delegate_to: localhost - - - name: Check connectivity - local_action: command ping -c 1 {{ ansible_host }} - failed_when: false - register: ping_result - - - name: Fail if host is unreachable - fail: msg='Host {{ ansible_host }} is unreachable!' - with_items: - - "{{ ping_result.stdout_lines|list }}" - when: ping_result.rc != 0 and item is search('100% packet loss') - - - block: - - name: Fail if password change response sequence is not defined - fail: msg="The mandatory parameter password_change_response is not defined." - when: (vault_password_change_responses is not defined) and - (password_change_responses is not defined) - - - debug: - msg: "Changing the initial password.." - - - name: Change initial password - expect: - echo: yes - command: "ssh -p {{ ansible_port }} {{ ansible_ssh_user }}@{{ ansible_host }}" - responses: "{{ vault_password_change_responses | default(password_change_responses) }}" - failed_when: false - delegate_to: localhost - - rescue: - # Initial password has been changed and the user forgot to exclude - # password_change option in the command line for the replay. - - debug: - msg: "Password has already been changed" - - when: change_password - - when: inventory_hostname != 'localhost' - # Check for one of unmistakenly StarlingX packages - name: "Look for unmistakenly {{ image_brand }} package" command: rpm -q controllerconfig @@ -79,12 +28,6 @@ register: initial_config_complete - block: - # Restore doesn't support replay - - name: Fail if bootstrap is in restore mode and the host has been unlocked - fail: - msg: "Host {{ ansible_host }} has been unlocked. Cannot perform restore." - when: mode == 'restore' - - name: Set skip_play flag for host set_fact: skip_play: true @@ -101,16 +44,6 @@ # Proceed only if skip_play flag is not turned on - block: - block: - - name: Check if restore is in progress if bootstrap is with restore mode - stat: - path: "{{ restore_in_progress_flag }}" - register: restore_in_progress - - - name: Fail if restore is already in progress - fail: - msg: " Restore is already in progress!" 
- when: restore_in_progress.stat.exists - # Do load verification and patching if required - include_tasks: load_patching_tasks.yml diff --git a/playbookconfig/src/playbooks/roles/bootstrap/prepare-env/vars/main.yml b/playbookconfig/src/playbooks/roles/bootstrap/prepare-env/vars/main.yml index 3733de6dd..7223d7673 100644 --- a/playbookconfig/src/playbooks/roles/bootstrap/prepare-env/vars/main.yml +++ b/playbookconfig/src/playbooks/roles/bootstrap/prepare-env/vars/main.yml @@ -7,6 +7,5 @@ supported_release_versions: - "19.09" patching_permdir: /opt/patching patching_repo_permdir: /www/pages/updates -restore_in_progress_flag: /etc/platform/.restore_in_progress restore_patching_complete_flag: /etc/platform/.restore_patching_complete node_is_patched_flag: /var/run/node_is_patched diff --git a/playbookconfig/src/playbooks/roles/common/prepare-env/tasks/main.yml b/playbookconfig/src/playbooks/roles/common/prepare-env/tasks/main.yml new file mode 100644 index 000000000..475e7973a --- /dev/null +++ b/playbookconfig/src/playbooks/roles/common/prepare-env/tasks/main.yml @@ -0,0 +1,75 @@ +--- +# +# Copyright (c) 2019 Wind River Systems, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# +# ROLE DESCRIPTION: +# This role is to perform tasks that are common to the playbooks. +# + +# Include user override files for a play +- stat: + path: "{{ item }}" + register: files_to_import + with_items: + - "{{ override_files_dir }}/secrets.yml" + - "{{ override_files_dir }}/{{ inventory_hostname }}_secrets.yml" + - "{{ override_files_dir }}/site.yml" + - "{{ override_files_dir }}/{{ inventory_hostname }}.yml" + delegate_to: localhost + +- include_vars: "{{ item.item }}" + when: item.stat.exists + with_items: "{{ files_to_import.results }}" + +# Check host connectivity, change password if provided +- block: + - name: Set SSH port + set_fact: + ansible_port: "{{ ansible_port | default(22) }}" + + - name: Update SSH known hosts + lineinfile: + path: ~/.ssh/known_hosts + state: absent + regexp: '^{{ ansible_host }}|^\[{{ ansible_host }}\]:{{ ansible_port }}' + delegate_to: localhost + + - name: Check connectivity + local_action: command ping -c 1 {{ ansible_host }} + failed_when: false + register: ping_result + + - name: Fail if host is unreachable + fail: msg='Host {{ ansible_host }} is unreachable!' + with_items: + - "{{ ping_result.stdout_lines|list }}" + when: ping_result.rc != 0 and item is search('100% packet loss') + + - block: + - name: Fail if password change response sequence is not defined + fail: msg="The mandatory parameter password_change_response is not defined." + when: (vault_password_change_responses is not defined) and + (password_change_responses is not defined) + + - debug: + msg: "Changing the initial password.." + + - name: Change initial password + expect: + echo: yes + command: "ssh -p {{ ansible_port }} {{ ansible_ssh_user }}@{{ ansible_host }}" + responses: "{{ vault_password_change_responses | default(password_change_responses) }}" + failed_when: false + delegate_to: localhost + + rescue: + # Initial password has been changed and the user forgot to exclude + # password_change option in the command line for the replay. 
+ - debug: + msg: "Password has already been changed" + + when: password_change + + when: inventory_hostname != 'localhost' diff --git a/playbookconfig/src/playbooks/roles/include-override-files/tasks/main.yml b/playbookconfig/src/playbooks/roles/include-override-files/tasks/main.yml deleted file mode 100644 index b07af8087..000000000 --- a/playbookconfig/src/playbooks/roles/include-override-files/tasks/main.yml +++ /dev/null @@ -1,22 +0,0 @@ ---- -# -# Copyright (c) 2019 Wind River Systems, Inc. -# -# SPDX-License-Identifier: Apache-2.0 -# -# ROLE DESCRIPTION: -# This role is to include user override files for a play. -# -- stat: - path: "{{ item }}" - register: files_to_import - with_items: - - "{{ override_files_dir }}/secrets.yml" - - "{{ override_files_dir }}/{{ inventory_hostname }}_secrets.yml" - - "{{ override_files_dir }}/site.yml" - - "{{ override_files_dir }}/{{ inventory_hostname }}.yml" - delegate_to: localhost - -- include_vars: "{{ item.item }}" - when: item.stat.exists - with_items: "{{ files_to_import.results }}" diff --git a/playbookconfig/src/playbooks/roles/restore-platform/pre-restore-bootstrap/tasks/main.yml b/playbookconfig/src/playbooks/roles/restore-platform/prepare-env/tasks/main.yml similarity index 56% rename from playbookconfig/src/playbooks/roles/restore-platform/pre-restore-bootstrap/tasks/main.yml rename to playbookconfig/src/playbooks/roles/restore-platform/prepare-env/tasks/main.yml index 9fb2453e4..34c42fe97 100644 --- a/playbookconfig/src/playbooks/roles/restore-platform/pre-restore-bootstrap/tasks/main.yml +++ b/playbookconfig/src/playbooks/roles/restore-platform/prepare-env/tasks/main.yml @@ -5,8 +5,10 @@ # SPDX-License-Identifier: Apache-2.0 # # ROLE DESCRIPTION: -# This role is to retrieve the override file from the backup tarball -# required for the controller bootstrap. +# This role performs following tasks: +# 1. Retrieve the override file from the backup tarball +# required for the controller bootstrap. +# 2. Verify if platform restore should proceed # - block: - name: Fail if backup_filename is not defined or set @@ -53,3 +55,43 @@ when: search_result.rc != 0 delegate_to: localhost + +- block: + # Bail if the host has been unlocked + - name: Check initial config flag + stat: + path: /etc/platform/.initial_config_complete + register: initial_config_done + + - name: Fail if the host has been unlocked + fail: + msg: "Host {{ ansible_host }} has been unlocked. Cannot perform restore." + when: initial_config_done.stat.exists + + - name: Check if restore is in progress if bootstrap is with restore mode + stat: + path: "{{ restore_in_progress_flag }}" + register: restore_in_progress + + - name: Fail if restore is already in progress + fail: + msg: " Restore is already in progress!" + when: restore_in_progress.stat.exists + + - name: Create {{ restore_in_progress_flag }} flag file + file: + path: "{{ restore_in_progress_flag }}" + state: touch + + - name: For remote play set target_backup_dir to /scratch + set_fact: + target_backup_dir: /scratch + when: inventory_hostname != "localhost" + + - name: For local play set target_backup_dir to initial_backup_dir + set_fact: + target_backup_dir: "{{ initial_backup_dir }}" + when: inventory_hostname == "localhost" + + become: yes + become_user: root
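
Usage note: the restore flow above still takes its key inputs from the command
line, as the comments in host_vars/backup-restore/default.yml and
roles/backup-restore/transfer-file/tasks/main.yml describe. A minimal sketch of
a remote restore invocation is shown below; the backup directory and tarball
name are illustrative placeholders only (the real name is built from
platform_backup_filename_prefix plus a timestamp), and ansible_remote_tmp is
only needed for remote play, where the default /tmp staging area may be too
small for the backup tarball.

    ansible-playbook restore_platform.yml \
        -e "initial_backup_dir=/home/sysadmin" \
        -e "backup_filename=controller-0_platform_backup_2019_09_20_12_00_00.tgz" \
        -e "ansible_remote_tmp=/home/sysadmin/ansible-restore"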