Merge "Use project-config from zuul instead of direct clones"

This commit is contained in:
Zuul 2020-04-15 19:16:56 +00:00 committed by Gerrit Code Review
commit c3df2202f6
24 changed files with 97 additions and 104 deletions

View File

@ -1069,6 +1069,9 @@
label: ubuntu-xenial
- name: nb01-test.opendev.org
label: ubuntu-bionic
required-projects:
- openstack/project-config
- opendev/system-config
vars:
run_playbooks:
- playbooks/service-letsencrypt.yaml
@ -1279,6 +1282,9 @@
label: ubuntu-bionic
- name: gitea99.opendev.org
label: ubuntu-bionic
required-projects:
- openstack/project-config
- opendev/system-config
vars:
run_playbooks:
- playbooks/service-letsencrypt.yaml
@ -1384,6 +1390,9 @@
label: ubuntu-xenial
- name: review-dev01.opendev.org
label: ubuntu-xenial
required-projects:
- openstack/project-config
- opendev/system-config
vars:
run_playbooks:
- playbooks/service-letsencrypt.yaml
@ -1460,7 +1469,8 @@
This is a parent job designed to be inherited to enable
CD deployment of our infrastructure. Set playbook_name to
specify the playbook relative to
bridge.openstack.org:/opt/system-config/playbooks
/home/zuul/src/opendev.org/opendev/system-config/playbooks
on bridge.openstack.org.
abstract: true
semaphore: infra-prod-playbook
run: playbooks/zuul/run-production-playbook.yaml
@ -1557,6 +1567,9 @@
allowed-projects:
- opendev/system-config
- openstack/project-config
required-projects:
- opendev/system-config
- openstack/project-config
vars:
playbook_name: manage-projects.yaml
infra_prod_ansible_forks: 10
@ -1631,6 +1644,9 @@
description: Run service-nodepool.yaml playbook
vars:
playbook_name: service-nodepool.yaml
required-projects:
- opendev/system-config
- openstack/project-config
files:
- inventory/.*
- playbooks/service-nodepool.yaml

View File

@ -1,33 +0,0 @@
# This ansible.cfg file is only for running ad-hoc commands from
# the /opt/system-config checkout. This file should be kept in
# sync with playbooks/roles/install-ansible/templates/ansible.cfg.j2
[defaults]
inventory=/opt/system-config/inventory/openstack.yaml,/opt/system-config/inventory/groups.yaml,/etc/ansible/hosts/emergency.yaml
library=/usr/share/ansible
log_path=/var/log/ansible/ansible.log
inventory_plugins=/opt/system-config/playbooks/roles/install-ansible/files/inventory_plugins/inventory_plugins
roles_path=/opt/system-config/roles:/etc/ansible/roles
retry_files_enabled=False
retry_files_save_path=
gathering=smart
fact_caching=jsonfile
fact_caching_connection=/var/cache/ansible/facts
# Squash warning about ansible auto-transforming group names with -'s in them
force_valid_group_names=ignore
callback_whitelist=profile_tasks, timer
callback_plugins=/etc/ansible/callback_plugins
stdout_callback=debug
[inventory]
enable_plugins=yaml,yamlgroup,advanced_host_list,ini
cache=True
cache_plugin=jsonfile
cache_connection=/var/cache/ansible/inventory
any_unparsed_is_failed=True
[ssh_connection]
retries=3
pipelining = True
[callback_profile_tasks]
task_output_limit = 50

View File

@ -441,7 +441,7 @@ read-write volumes.
.. code-block:: console
root@bridge:~# /opt/system-config/tools/hieraedit.py \
root@bridge:~# /home/zuul/src/opendev.org/opendev/system-config/tools/hieraedit.py \
--yaml /etc/ansible/hosts/host_vars/mirror-update01.opendev.org.yaml \
-f /path/to/foo.keytab KEYNAME

View File

@ -177,8 +177,8 @@ def bootstrap_server(server, key, name, volume_device, keep,
t.start()
inventory_list = (
'/opt/system-config/inventory/openstack.yaml',
'/opt/system-config/inventory/groups.yaml',
'/etc/ansible/hosts/openstack.yaml',
'/etc/ansible/hosts/groups.yaml',
'/etc/ansible/hosts/emergency.yaml',
jobdir.inventory_root,
)

View File

@ -28,6 +28,8 @@ iptables_base_public_udp_ports: []
iptables_extra_public_udp_ports: []
iptables_public_udp_ports: "{{ iptables_base_public_udp_ports + iptables_extra_public_udp_ports }}"
project_config_src: /home/zuul/src/opendev.org/openstack/project-config
# When adding new users, always pick a UID larger than the last UID, do not
# fill in holes in the middle of the range.
all_users:
@ -150,7 +152,7 @@ all_users:
uid: 2030
gid: 2030
zuulcd:
zuul:
comment: Zuul CICD
key: |
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDcXd/QJDEprSLh6N6bULnhchf9M+uzYBEJ2b51Au67FON+5M6VEj5Ut+DlkEPhabOP+tSv9Cn1HpmpBjdEOXdmBj6JS7G/gBb4w28oZDyNjrPT2ebpRw/XnVEkGfikR2J+j3o7CV+ybhLDalXm2TUDReVXnONUq3YzZbjRzoYs0xxrxyss47vZP0xFpsAt9jCMAJW2k6H589VUY38k9LFyhZUZ72FB6eJ68B9GN0TimBYm2DqvupBGQrRhkP8OZ0WoBV8PulKXaHVFdmfBNHB7E7FLlZKuiM6nkV4bOWMGOB/TF++wXBK86t9po3pWCM7+kr72xGRTE+6LuZ2z1K+h zuul-system-config-20180924

View File

@ -5,7 +5,7 @@ puppet_reports: none
manage_config: true
manifest: /opt/system-config/production/manifests/site.pp
manifest_base: /opt/system-config/production
mgmt_manifestpath: /opt/system-config/
mgmt_manifestpath: /home/zuul/src/opendev.org/opendev/system-config/
puppet_logdest: syslog
mgmt_hieradata: /etc/ansible/hosts
mgmt_puppet_module_dir: /etc/puppet/modules

View File

@ -2,4 +2,4 @@ ansible_python_interpreter: python3
bastion_key_exclusive: false
kube_config_template: clouds/bridge_kube_config.yaml.j2
extra_users:
- zuulcd
- zuul

View File

@ -10,6 +10,6 @@ letsencrypt_gid: 3001
gerrit_storyboard_url: https://storyboard-dev.openstack.org
gerrit_vhost_name: review-dev.opendev.org
gerrit_redirect_vhost: review-dev.openstack.org
gerrit_project_config_base: /opt/project-config/dev
gerrit_project_creator_user: openstack-dev-project-creator
gerrit_self_hostkey: '[review-dev.opendev.org]:29418,[review-dev.openstack.org]:29418,[23.253.109.153]:29418,[2001:4800:7819:104:be76:4eff:fe04:8e55]:29418 ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC4J4BJ/C6kl1PcfD5ZdpYIwWXA+vRiB4USncZQHW9+Idtdr4dZRA05RlBAfiTkKKhjarJpt8PQP2hYt8aJL1miZZjp1s05d9mxGVHfoH7Vyg85vhRa7Jg4VZS0cu34R909q23cBcjSNQSyVKP9neOqovoV/DyB8HHEg0kbsOWC3qzdA+6aVdVV7Mtx/0t0MyiTz0xA5ZCRFwF6IuiMPHLNk128qDhjO2UXnrhyP5A7Kl/JHpIWToLKGIorePndFcFyNXlWIhBoQRDcX6FYjPdavjAGlK1S/Jd5DVJ184Z7rEXL682o487c0NQ/lAV4QF3iz0Aw9QRVrUw21xWvfU4R'
project_config_subdir: dev/

View File

@ -1,16 +1,3 @@
# Run on localhost for lookup plugins, on review/review-dev
# because manage-projects runs remotely.
- hosts: "localhost:!disabled"
name: "Clone project-config for projects list"
strategy: free
connection: local
tasks:
- name: Clone project-config repo
git:
repo: https://opendev.org/openstack/project-config
dest: /opt/project-config
force: yes
- hosts: "gitea:!disabled"
name: "Create repos on gitea servers"
strategy: free
@ -21,11 +8,9 @@
- hosts: "review:review-dev:!disabled"
name: "Create repos on gerrit servers"
tasks:
- name: Clone project-config repo
git:
repo: https://opendev.org/openstack/project-config
dest: /opt/project-config
force: yes
- name: Sync project-config
include_role:
name: sync-project-config
- name: Run manage-projects
include_role:
name: gerrit

View File

@ -8,7 +8,7 @@
name: run_cloud_launcher.sh
state: present
disabled: "{{ cloud_launcher_disable_job }}"
job: '/usr/bin/flock -n /var/run/ansible/run_cloud_launcher.lock /bin/bash /opt/system-config/run_cloud_launcher.sh -c >> /var/log/ansible/run_cloud_launcher_cron.log 2>&1'
job: '/usr/bin/flock -n /var/run/ansible/run_cloud_launcher.lock /bin/bash /home/zuul/src/opendev.org/opendev/system-config/run_cloud_launcher.sh -c >> /var/log/ansible/run_cloud_launcher_cron.log 2>&1'
minute: "{{ cloud_launcher_cron_interval.minute }}"
hour: "{{ cloud_launcher_cron_interval.hour }}"
day: "{{ cloud_launcher_cron_interval.day }}"

View File

@ -17,9 +17,8 @@ gerrit_container_volumes:
- /home/gerrit2/review_site/static:/var/gerrit/static
- /home/gerrit2/.launchpadlib:/var/gerrit/.launchpadlib
- /home/gerrit2/.ssh:/var/gerrit/.ssh
- '{{ gerrit_project_config_base }}/gerrit/projects.yaml:/var/gerrit/etc/projects.yaml'
- '{{ gerrit_project_config_base }}/gerrit/projects.ini:/var/gerrit/etc/projects.ini'
- /opt/project-config/gerrit/projects.yaml:/var/gerrit/etc/projects.yaml
- /opt/project-config/gerrit/projects.ini:/var/gerrit/etc/projects.ini
gerrit_database_type: MYSQL
gerrit_project_config_base: /opt/project-config
gerrit_project_creator_user: openstack-project-creator
gerrit_manage_projects_args: "-v"

View File

@ -1,13 +1,6 @@
# TODO(mordred) We should do *something* where this could use a zuul cloned
# copy of project-config instead. This is needed not just for things like
# manage-projects (which could be run completely differently and non-locally)
# but also for things like notify-impact, which is currently run by a gerrit
# hook inside of the container via jeepyb.
- name: Clone project-config repo
git:
repo: https://opendev.org/openstack/project-config
dest: /opt/project-config
force: yes
- name: Sync project-config
include_role:
name: sync-project-config
- name: Ensure /etc/gerrit-compose directory
file:
@ -203,7 +196,7 @@
- name: Copy notify-impact yaml file
copy:
src: "{{ gerrit_project_config_base }}/gerrit/notify_impact.yaml"
src: "/opt/project-config/gerrit/notify_impact.yaml"
dest: "{{ gerrit_site_dir }}/hooks/notify_impact.yaml"
remote_src: yes
owner: "{{ gerrit_user_name }}"

View File

@ -15,9 +15,9 @@
# limitations under the License.
exec docker run --rm --net=host -u root \
-v{{ gerrit_project_config_base }}:/opt/project-config \
-v{{ gerrit_project_config_base }}/gerrit/acls:/home/gerrit2/acls \
-v{{ gerrit_project_config_base }}/gerrit/projects.yaml:/home/gerrit2/projects.yaml \
-v/opt/project-config:/opt/project-config \
-v/opt/project-config/gerrit/acls:/home/gerrit2/acls \
-v/opt/project-config/gerrit/projects.yaml:/home/gerrit2/projects.yaml \
-v/opt/lib/git:/opt/lib/git \
-v/opt/lib/jeepyb:/opt/lib/jeepyb \
-v/home/gerrit2/review_site/etc/ssh_project_rsa_key:/home/gerrit2/review_site/etc/ssh_project_rsa_key \

View File

@ -15,8 +15,8 @@
# limitations under the License.
exec docker run --rm --net=host -u root \
-v{{ gerrit_project_config_base }}:/opt/project-config \
-v{{ gerrit_project_config_base }}/gerrit/projects.yaml:/home/gerrit2/projects.yaml \
-v/opt/project-config:/opt/project-config \
-v/opt/project-config/gerrit/projects.yaml:/home/gerrit2/projects.yaml \
-v/opt/lib/git:/opt/lib/git \
-v/opt/lib/jeepyb:/opt/lib/jeepyb \
-v/home/gerrit2/review_site/etc/ssh_project_rsa_key:/home/gerrit2/review_site/etc/ssh_project_rsa_key \

View File

@ -4,5 +4,5 @@
password: "{{ gitea_root_password }}"
always_update: "{{ gitea_always_update }}"
# Lookup runs locally on the calling machine, so doesn't need
# /opt/project-config remotely
projects: "{{ lookup('file', '/opt/project-config/gerrit/projects.yaml') | from_yaml }}"
# project-config remotely
projects: "{{ lookup('file', project_config_src + '/gerrit/projects.yaml') | from_yaml }}"

View File

@ -18,16 +18,9 @@
name: install-zookeeper
when: nodepool_base_install_zookeeper
# NOTE(ianw) : A note on testing; we have some configurations for
# system-config-run-nodepool test hosts committed to project-config.
# Since this is a protected repo we can't speculatively test, which is
# why we're just cloning from opendev.org master and not a local
# checkout here. We don't expect the configs to change so this is OK.
- name: Clone the project-config repo for configs
git:
repo: 'https://opendev.org/openstack/project-config'
dest: /opt/project-config
force: yes
- name: Sync project-config
include_role:
name: sync-project-config
- name: Create nodepool config dir
file:
@ -52,4 +45,4 @@
file:
state: link
src: /opt/project-config/nodepool/elements
dest: /etc/nodepool/elements
dest: /etc/nodepool/elements

View File

@ -0,0 +1 @@
Sync project-config to remote host

View File

@ -0,0 +1,2 @@
project_config_dest: /opt/project-config
project_config_subdir: ""

View File

@ -0,0 +1,11 @@
- name: Create project-config dir
file:
path: '{{ project_config_dest }}'
state: directory
- name: Sync project-config repo
synchronize:
src: '{{ project_config_src }}/{{ project_config_subdir }}'
dest: '{{ project_config_dest }}'

View File

@ -15,11 +15,6 @@
write_inventory_exclude_hostvars:
- ansible_user
- ansible_python_interpreter
- name: Set up /opt/system-config repo
git:
repo: /home/zuul/src/opendev.org/opendev/system-config
dest: /opt/system-config
force: yes
- name: Add groups config for test nodes
template:
src: "templates/gate-groups.yaml.j2"
@ -73,6 +68,28 @@
- host_vars/nb01-test.opendev.org.yaml
- name: Display group membership
command: ansible localhost -m debug -a 'var=groups'
# In prod, bridge installs a zuul user, but in zuul we already have a zuul user, so we really need
# to not modify it.
- name: Load bridge hostvars
slurp:
path: /home/zuul/src/opendev.org/opendev/system-config/playbooks/host_vars/bridge.openstack.org.yaml
register: bridge_hostvar_content
- name: Parse bridge_hostvars
set_fact:
bridge_hostvars: "{{ bridge_hostvar_content.content | b64decode | from_yaml }}"
- name: Overwrite extra_users
vars:
new_config:
extra_users: []
set_fact:
bridge_hostvars: "{{ bridge_hostvars | combine(new_config) }}"
- name: Save bridge hostvars
copy:
content: "{{ bridge_hostvars | to_nice_yaml }}"
dest: /home/zuul/src/opendev.org/opendev/system-config/playbooks/host_vars/bridge.openstack.org.yaml
become: true
- name: Run base.yaml
command: ansible-playbook -v /home/zuul/src/opendev.org/opendev/system-config/playbooks/base.yaml
- name: Run bridge service playbook

View File

@ -4,7 +4,7 @@
add_host:
name: bridge.openstack.org
ansible_python_interpreter: python3
ansible_user: zuulcd
ansible_user: zuul
- hosts: localhost
tasks:
@ -15,6 +15,13 @@
- hosts: bridge.openstack.org
tasks:
- name: Make sure a manual maint isn't going on
wait_for:
path: /home/zuul/DISABLE-ANSIBLE
state: absent
sleep: 10
timeout: 3600 # Wait for an hour before bailing
- name: Synchronize src repos to workspace directory.
synchronize:
delete: false
@ -28,11 +35,11 @@
- name: Log a playbook start header
become: yes
shell: 'echo "Running {{ ansible_date_time.iso8601 }}: ansible-playbook -v -f {{ infra_prod_ansible_forks }} /home/zuulcd/src/opendev.org/opendev/system-config/playbooks/{{ playbook_name }}" > /var/log/ansible/{{ playbook_name }}.log'
shell: 'echo "Running {{ ansible_date_time.iso8601 }}: ansible-playbook -v -f {{ infra_prod_ansible_forks }} /home/zuul/src/opendev.org/opendev/system-config/playbooks/{{ playbook_name }}" > /var/log/ansible/{{ playbook_name }}.log'
- name: Run specified playbook on bridge.o.o and redirect output
become: yes
shell: 'ansible-playbook -v -f {{ infra_prod_ansible_forks }} /home/zuulcd/src/opendev.org/opendev/system-config/playbooks/{{ playbook_name }} >> /var/log/ansible/{{ playbook_name }}.log'
shell: 'ansible-playbook -v -f {{ infra_prod_ansible_forks }} /home/zuul/src/opendev.org/opendev/system-config/playbooks/{{ playbook_name }} >> /var/log/ansible/{{ playbook_name }}.log'
always:

View File

@ -19,7 +19,7 @@
# expect.
set -e
export ANSIBLE_LOG_PATH=/var/log/puppet_run_cloud_launcher.log
SYSTEM_CONFIG=/opt/system-config
SYSTEM_CONFIG=/home/zuul/src/opendev.org/opendev/system-config
ANSIBLE_PLAYBOOKS=$SYSTEM_CONFIG/playbooks
# It's possible for connectivity to a server or manifest application to break

View File

@ -79,8 +79,8 @@ def test_kubectl(host):
assert kube.rc == 0
def test_zuulcd_authorized_keys(host):
authorized_keys = host.file('/home/zuulcd/.ssh/authorized_keys')
def test_zuul_authorized_keys(host):
authorized_keys = host.file('/home/zuul/.ssh/authorized_keys')
assert authorized_keys.exists
content = authorized_keys.content.decode('utf8')

View File

@ -16,5 +16,5 @@
for playbook in base.yaml remote_puppet_adhoc.yaml ; do
ansible-playbook -f1 --limit $1 \
/opt/system-config/playbooks/$playbook
/home/zuul/src/opendev.org/opendev/system-config/playbooks/$playbook
done