Remove packethost provider

This provider has already been disabled in these config files by
setting max-servers to 0 and diskimages to []. The nodepool logs now
show errors about being unable to contact this provider since it
appears to be disabled/gone, so it is time to remove the entries.

Change-Id: I2adbb575592eefe6958157d8fb007d0b50998e82
David Shrewsbury 2019-10-17 13:03:07 -04:00
parent e3259b7c64
commit c0278edda7
4 changed files with 0 additions and 287 deletions
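For reference, the disable-before-removal pattern that the commit message describes looks like the following in the nodepool configs (a minimal sketch assembled from the stanzas deleted below): the launcher keeps the provider but caps it at zero servers, and the builder keeps the cloud but uploads no images to it.

# Launcher config: provider present, but no nodes may launch.
providers:
  - name: packethost-us-west-1
    region-name: 'us-west-1'
    cloud: packethost
    pools:
      - name: main
        max-servers: 0        # quota of zero disables launches
---
# Builder config: provider present, but no images are built/uploaded.
providers:
  - name: packethost-us-west-1
    region-name: 'us-west-1'
    cloud: packethost
    diskimages: []            # empty list disables image uploads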


@@ -24,7 +24,6 @@ create Inap 'inap-*' nodepool-inap.yaml
create Limestone 'limestone-*' nodepool-limestone.yaml
create Linaro 'linaro-*' nodepool-linaro.yaml
create OVH 'ovh-*' nodepool-ovh.yaml
create Packethost 'packethost-*' nodepool-packethost.yaml
create Vexxhost 'vexxhost-*' nodepool-vexxhost.yaml
create Citycloud 'citycloud-*' nodepool-citycloud.yaml
create FortNebula 'fortnebula-*' nodepool-fortnebula.yaml
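Each `create` line above maps one provider onto the shared dashboard template: a display name, a statsd metric glob, and an output filename. The glob presumably becomes the dashboard's $region template query, as seen in the Packethost template below; a sketch of the correspondence using Vexxhost, one of the remaining providers (generated filename and substitution inferred from the lines above):

# nodepool-vexxhost.yaml (generated): the second argument to `create`
# surfaces as the region template variable's query.
templating:
  - name: region
    query: stats.gauges.nodepool.provider.vexxhost-*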


@@ -1,197 +0,0 @@
#
# NOTE: EDIT THE TEMPLATE FILE AND RUN create-nodepool.sh
#
dashboard:
  title: 'Nodepool: Packethost'
  templating:
    - name: region
      includeAll: true
      multi: true
      query: stats.gauges.nodepool.provider.packethost-*
      refresh: true
      type: query
  rows:
    - title: Description
      height: 150px
      panels:
        - title: Description
          content: |
            Packethost Nodepool Status
            ==========================

            This dashboard monitors the status of the nodepool environment for Packethost.

            **This dashboard is managed by [Grafyaml](http://docs.openstack.org/infra/system-config/grafyaml.html).**

            If you would like to make changes to this dashboard, please see the template in the `grafana` directory in
            [project-config](https://opendev.org/openstack/project-config/src/branch/master/grafana/nodepool.template).
          type: text
    - title: Nodes
      showTitle: true
      height: 150px
      panels:
        - title: Building
          span: 3
          sparkline:
            full: true
            show: true
          targets:
            - target: sumSeries(stats.gauges.nodepool.provider.$region.nodes.building)
          type: singlestat
          valueName: current
        - title: Ready
          span: 3
          sparkline:
            full: true
            show: true
          targets:
            - target: sumSeries(stats.gauges.nodepool.provider.$region.nodes.ready)
          type: singlestat
          valueName: current
        - title: In Use
          span: 3
          sparkline:
            full: true
            show: true
          targets:
            - target: sumSeries(stats.gauges.nodepool.provider.$region.nodes.in-use)
          type: singlestat
          valueName: current
        - title: Deleting
          span: 3
          sparkline:
            full: true
            show: true
          targets:
            - target: sumSeries(stats.gauges.nodepool.provider.$region.nodes.deleting)
          type: singlestat
          valueName: current
    - title: Test Nodes
      height: 400px
      panels:
        - title: Test Node History - $region
          type: graph
          span: 12
          stack: true
          repeat: region
          minSpan: 4
          tooltip:
            value_type: individual
          yaxes:
            - label: "nodes"
            - show: false
          targets:
            - target: alias(sumSeries(stats.gauges.nodepool.provider.$region.nodes.building), 'Building')
            - target: alias(sumSeries(stats.gauges.nodepool.provider.$region.nodes.ready), 'Available')
            - target: alias(sumSeries(stats.gauges.nodepool.provider.$region.nodes.in-use), 'In Use')
            - target: alias(sumSeries(stats.gauges.nodepool.provider.$region.nodes.used), 'Used')
            - target: alias(sumSeries(stats.gauges.nodepool.provider.$region.nodes.deleting), 'Deleting')
            - target: alias(sumSeries(stats.gauges.nodepool.provider.$region.max_servers), 'Max')
          seriesOverrides:
            - alias: Max
              stack: False
    - title: Node Launches
      showTitle: true
      height: 250px
      panels:
        - title: Ready Node Launch Attempts
          type: graph
          span: 4
          lines: false
          bars: true
          nullPointMode: null as zero
          yaxes:
            - label: "events / min"
            - show: false
          targets:
            - target: aliasSub(summarize(stats_counts.nodepool.launch.provider.$region.ready, '1m'), '.*stats_counts.nodepool.launch.provider.(.*).ready.*', '\1')
        - title: Time to Ready
          type: graph
          span: 4
          lines: false
          bars: true
          nullPointMode: null as zero
          yaxes:
            - label: "time"
              format: ms
            - show: false
          targets:
            - target: aliasByNode(stats.timers.nodepool.launch.provider.$region.ready.mean, 5)
        - title: Error Node Launch Attempts
          type: graph
          span: 4
          lines: false
          bars: true
          nullPointMode: null as zero
          yaxes:
            - label: "events / min"
            - show: false
          targets:
            - target: alias(smartSummarize(sumSeries(stats_counts.nodepool.launch.provider.$region.error.*), '1m'), "All Errors")
    - title: API Operations
      showTitle: true
      height: 250px
      panels:
        - title: Create Server
          type: graph
          lines: true
          nullPointMode: connected
          span: 4
          yaxes:
            - format: ms
              label: Time
            - show: false
          targets:
            - target: aliasByNode(stats.timers.nodepool.task.$region.ComputePostServers.mean, 4)
        - title: Get Server
          type: graph
          lines: true
          nullPointMode: connected
          span: 4
          yaxes:
            - format: ms
              label: Time
            - show: false
          targets:
            - target: aliasByNode(stats.timers.nodepool.task.$region.ComputePostServers.mean, 4)
        - title: Delete Server
          type: graph
          lines: true
          nullPointMode: connected
          span: 4
          yaxes:
            - format: ms
              label: Time
            - show: false
          targets:
            - target: aliasByNode(stats.timers.nodepool.task.$region.ComputePostServers.mean, 4)
        - title: List Servers
          type: graph
          lines: true
          nullPointMode: connected
          span: 4
          yaxes:
            - format: ms
              label: Time
            - show: false
          targets:
            - target: aliasByNode(stats.timers.nodepool.task.$region.ComputePostServers.mean, 4)
        - title: Get Limits
          type: graph
          lines: true
          nullPointMode: connected
          span: 4
          yaxes:
            - format: ms
              label: Time
            - show: false
          targets:
            - target: aliasByNode(stats.timers.nodepool.task.$region.ComputePostServers.mean, 4)
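The targets in the removed template rely on a handful of standard Graphite functions; a commented recap of the patterns, reusing the template's own metric namespace:

targets:
  # Collapse every region matched by $region into one series.
  - target: sumSeries(stats.gauges.nodepool.provider.$region.nodes.ready)
  # Same, renamed for the legend.
  - target: alias(sumSeries(stats.gauges.nodepool.provider.$region.nodes.ready), 'Available')
  # Name each series after node 5 of the dotted path (0-indexed), i.e. the region.
  - target: aliasByNode(stats.timers.nodepool.launch.provider.$region.ready.mean, 5)
  # Re-bucket a raw counter into per-minute totals, then pull the region
  # name out of the path with a regex capture group.
  - target: aliasSub(summarize(stats_counts.nodepool.launch.provider.$region.ready, '1m'), '.*provider.(.*).ready.*', '\1')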


@@ -411,89 +411,6 @@ providers:
            diskimage: centos-7
            key-name: infra-root-keys-2018-06-15
  - name: packethost-us-west-1
    region-name: 'us-west-1'
    cloud: packethost
    boot-timeout: 120
    launch-timeout: 300
    clean-floating-ips: true
    rate: 0.01
    diskimages: *provider_diskimages
    pools:
      - name: main
        max-servers: 0
        labels:
          - name: centos-7
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: centos-7
            key-name: infra-root-keys-2018-06-15
          - name: centos-8
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: centos-8
            key-name: infra-root-keys-2018-06-15
          - name: debian-buster
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: debian-buster
            key-name: infra-root-keys-2018-06-15
          - name: debian-stretch
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: debian-stretch
            key-name: infra-root-keys-2018-06-15
          - name: fedora-28
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: fedora-28
            key-name: infra-root-keys-2018-06-15
          - name: fedora-29
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: fedora-29
            key-name: infra-root-keys-2018-06-15
          - name: fedora-30
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: fedora-30
            key-name: infra-root-keys-2018-06-15
          - name: gentoo-17-0-systemd
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: gentoo-17-0-systemd
            key-name: infra-root-keys-2018-06-15
          - name: opensuse-150
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: opensuse-150
            key-name: infra-root-keys-2018-06-15
          - name: opensuse-15
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: opensuse-15
            key-name: infra-root-keys-2018-06-15
          - name: opensuse-tumbleweed
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: opensuse-tumbleweed
            key-name: infra-root-keys-2018-06-15
          - name: ubuntu-bionic
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: ubuntu-bionic
            key-name: infra-root-keys-2018-06-15
          - name: ubuntu-trusty
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: ubuntu-trusty
            key-name: infra-root-keys-2018-06-15
          - name: ubuntu-xenial
            min-ram: 8000
            flavor-name: 'zuul-flavor'
            diskimage: ubuntu-xenial
            key-name: infra-root-keys-2018-06-15
diskimages:
  - name: centos-7
  - name: centos-8
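The `diskimages: *provider_diskimages` line in the removed stanza is a YAML alias: the file defines the shared image list once under a `&provider_diskimages` anchor (on an earlier provider, outside this hunk) and every provider references it. A hypothetical minimal file showing just the mechanism:

# Illustration only: the anchor/alias pattern used by the real config.
diskimages: &provider_diskimages
  - name: centos-7
  - name: ubuntu-bionic
providers:
  - name: example-provider               # hypothetical provider
    diskimages: *provider_diskimages     # expands to the anchored list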


@@ -112,12 +112,6 @@ providers:
    rate: 0.001
    diskimages: *provider_diskimages
  - name: packethost-us-west-1
    region-name: 'us-west-1'
    cloud: packethost
    rate: 0.01
    diskimages: []
  - name: fortnebula-regionone
    region-name: 'regionOne'
    cloud: fortnebula