Now someone can override individual endpoints like below in an environment:

  parameter_merge_strategies:
    EndpointMap: merge
  parameter_defaults:
    EndpointMap:
      AodhPublic:
        protocol: http
        port: 9999
        host: 121.21.2.1

Closes-Bug: #1897592
Change-Id: Ibf2e0d183dd51421c4feb7467c3c01fb416d2965
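For context on what the merge strategy does here: the EndpointMap entries from parameter_defaults are deep-merged into the generated defaults instead of replacing the whole map, so only the listed endpoints change. Below is a minimal illustrative sketch of that behaviour; the deep_merge helper is a hypothetical stand-in for the merging performed by the deployment tooling, and the default entries are an excerpt of what endpoint_map_default() in build_endpoint_map.py would generate.

import copy


def deep_merge(base, override):
    # Hypothetical helper: recursively fold override values into a copy of base.
    result = copy.deepcopy(base)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(result.get(key), dict):
            result[key] = deep_merge(result[key], value)
        else:
            result[key] = value
    return result


# Excerpt of the generated defaults (see endpoint_map_default() below).
defaults = {
    'AodhPublic': {'protocol': 'http', 'port': '8042', 'host': 'IP_ADDRESS'},
    'AodhInternal': {'protocol': 'http', 'port': '8042', 'host': 'IP_ADDRESS'},
}

# The parameter_defaults override from the environment file above.
override = {'AodhPublic': {'protocol': 'http', 'port': 9999, 'host': '121.21.2.1'}}

merged = deep_merge(defaults, override)
print(merged['AodhPublic'])    # {'protocol': 'http', 'port': 9999, 'host': '121.21.2.1'}
print(merged['AodhInternal'])  # untouched: {'protocol': 'http', 'port': '8042', 'host': 'IP_ADDRESS'}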
14 changed files with 112 additions and 1426 deletions
@@ -1,308 +0,0 @@
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Generate the endpoint_map.yaml template from data in the endpoint_data.yaml
file.

By default the files in the same directory as this script are operated on, but
different files can be optionally specified on the command line.

The --check option verifies that the current output file is up-to-date with the
latest data in the input file. The script exits with status code 2 if a
mismatch is detected.
"""
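# Example invocations (illustrative; the flags are those defined in
# get_options() below):
#
#     python build_endpoint_map.py            # regenerate endpoint_map.yaml in place
#     python build_endpoint_map.py --check    # exit with status 2 if the map is stale
#     python build_endpoint_map.py -i data.yaml -o map.yaml   # alternate files ('-' reads/writes stdin/stdout)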
import collections
import copy
import itertools
import os
import sys
import yaml


__all__ = ['load_endpoint_data', 'generate_endpoint_map_template',
           'write_template', 'build_endpoint_map', 'check_up_to_date']

(IN_FILE, OUT_FILE) = ('endpoint_data.yaml', 'endpoint_map.yaml')

SUBST = (SUBST_IP_ADDRESS, SUBST_CLOUDNAME) = ('IP_ADDRESS', 'CLOUDNAME')
PARAMS = (PARAM_CLOUD_ENDPOINTS, PARAM_ENDPOINTMAP, PARAM_NETIPMAP,
          PARAM_SERVICENETMAP) = (
              'CloudEndpoints', 'EndpointMap', 'NetIpMap', 'ServiceNetMap')
FIELDS = (F_PORT, F_PROTOCOL, F_HOST) = ('port', 'protocol', 'host')

ENDPOINT_TYPES = frozenset(['Internal', 'Public', 'Admin'])


def get_file(default_fn, override=None, writable=False):
    if override == '-':
        if writable:
            return sys.stdout
        else:
            return sys.stdin

    if override is not None:
        filename = override
    else:
        filename = os.path.join(os.path.dirname(__file__), default_fn)

    return open(filename, 'w' if writable else 'r')


def load_endpoint_data(infile=None):
    with get_file(IN_FILE, infile) as f:
        return yaml.safe_load(f)


def net_param_name(endpoint_type_defn):
    return endpoint_type_defn['net_param'] + 'Network'


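# Build the default value of the EndpointMap parameter: one
# '<Service><EndpointType>' key per endpoint defined in endpoint_data.yaml,
# each mapping to its protocol, port and an IP_ADDRESS host placeholder.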
def endpoint_map_default(config):
    def map_item(ep_name, ep_type, svc):
        values = collections.OrderedDict([
            (F_PROTOCOL, str(svc[ep_type].get(F_PROTOCOL,
                                              svc.get(F_PROTOCOL, 'http')))),
            (F_PORT, str(svc[ep_type].get(F_PORT, svc[F_PORT]))),
            (F_HOST, SUBST_IP_ADDRESS),
        ])
        return ep_name + ep_type, values

    return collections.OrderedDict(map_item(ep_name, ep_type, svc)
                                   for ep_name, svc in sorted(config.items())
                                   for ep_type in sorted(set(svc) &
                                                         ENDPOINT_TYPES))


def make_parameter(ptype, default, description=None):
    param = collections.OrderedDict([('type', ptype), ('default', default)])
    if description is not None:
        param['description'] = description
    return param


def template_parameters(config):
    params = collections.OrderedDict()
    params[PARAM_NETIPMAP] = make_parameter('json', {}, 'The Net IP map')
    params[PARAM_SERVICENETMAP] = make_parameter('json', {},
                                                 'The Service Net map')
    params[PARAM_ENDPOINTMAP] = make_parameter('json',
                                               endpoint_map_default(config),
                                               'Mapping of service endpoint '
                                               '-> protocol. Typically set '
                                               'via parameter_defaults in the '
                                               'resource registry.')

    params[PARAM_CLOUD_ENDPOINTS] = make_parameter(
        'json',
        {},
        ('A map containing the DNS names for the different endpoints '
         '(external, internal_api, etc.)'))
    return params


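# Build a single endpoint's entry for the endpoint_map output. The
# host/host_nobrackets, port, protocol, uri and uri_no_suffix values are Heat
# intrinsics (get_param, str_replace, make_url) that get resolved at
# deployment time from the EndpointMap, NetIpMap, ServiceNetMap and
# CloudEndpoints parameters.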
def template_output_definition(endpoint_name,
                               endpoint_variant,
                               endpoint_type,
                               net_param,
                               uri_suffix=None,
                               name_override=None):
    def extract_field(field):
        assert field in FIELDS
        return {'get_param': ['EndpointMap',
                              endpoint_name + endpoint_type,
                              copy.copy(field)]}

    port = extract_field(F_PORT)
    protocol = extract_field(F_PROTOCOL)
    host_nobrackets = {
        'str_replace': collections.OrderedDict([
            ('template', extract_field(F_HOST)),
            ('params', {
                SUBST_IP_ADDRESS: {'get_param':
                                   ['NetIpMap',
                                    {'get_param': ['ServiceNetMap',
                                                   net_param]}]},
                SUBST_CLOUDNAME: {'get_param':
                                  [PARAM_CLOUD_ENDPOINTS,
                                   {'get_param': ['ServiceNetMap',
                                                  net_param]}]},
            })
        ])
    }
    host = {
        'str_replace': collections.OrderedDict([
            ('template', extract_field(F_HOST)),
            ('params', {
                SUBST_IP_ADDRESS: {'get_param':
                                   ['NetIpMap',
                                    {'str_replace':
                                     {'template': 'NETWORK_uri',
                                      'params': {'NETWORK':
                                                 {'get_param': ['ServiceNetMap',
                                                                net_param]}}}}]},
                SUBST_CLOUDNAME: {'get_param':
                                  [PARAM_CLOUD_ENDPOINTS,
                                   {'get_param': ['ServiceNetMap',
                                                  net_param]}]},
            })
        ])
    }
    uri_no_path = {
        'make_url': collections.OrderedDict([
            ('scheme', protocol),
            ('host', copy.deepcopy(host)),
            ('port', port)
        ])
    }
    uri_with_path = copy.deepcopy(uri_no_path)
    if uri_suffix is not None:
        path, pc, suffix = uri_suffix.partition('%')
        uri_with_path['make_url']['path'] = path
        if pc:
            uri_with_path = {'list_join': ['', [uri_with_path, pc + suffix]]}

    name = name_override if name_override is not None else (endpoint_name +
                                                            endpoint_variant +
                                                            endpoint_type)

    return name, {
        'host_nobrackets': host_nobrackets,
        'host': host,
        'port': extract_field('port'),
        'protocol': extract_field('protocol'),
        'uri': uri_with_path,
        'uri_no_suffix': uri_no_path,
    }


def template_endpoint_items(config):
    def get_svc_endpoints(ep_name, svc):
        for ep_type in set(svc) & ENDPOINT_TYPES:
            defn = svc[ep_type]
            for variant, suffix in defn.get('uri_suffixes',
                                            {'': None}).items():
                name_override = defn.get('names', {}).get(variant)
                yield template_output_definition(ep_name, variant, ep_type,
                                                 net_param_name(defn),
                                                 suffix,
                                                 name_override)
    return itertools.chain.from_iterable(sorted(get_svc_endpoints(ep_name,
                                                                  svc))
                                         for (ep_name,
                                              svc) in sorted(config.items()))


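# Assemble the complete endpoint_map.yaml document: template version,
# description, parameters and the endpoint_map output.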
def generate_endpoint_map_template(config):
    return collections.OrderedDict([
        ('heat_template_version', 'wallaby'),
        ('description', 'A map of OpenStack endpoints. Since the endpoints '
         'are URLs, we need to have brackets around IPv6 IP addresses. The '
         'inputs to these parameters come from net_ip_uri_map, which will '
         'include these brackets in IPv6 addresses.'),
        ('parameters', template_parameters(config)),
        ('outputs', {
            'endpoint_map': {
                'value':
                    collections.OrderedDict(template_endpoint_items(config))
            }
        }),
    ])


autogen_warning = """### DO NOT MODIFY THIS FILE
### This file is automatically generated from endpoint_data.yaml
### by the script build_endpoint_map.py

"""


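# Dump OrderedDicts as ordinary YAML mappings so the generated template keeps
# a deterministic key order.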
class TemplateDumper(yaml.SafeDumper):
    def represent_ordered_dict(self, data):
        return self.represent_dict(data.items())


TemplateDumper.add_representer(collections.OrderedDict,
                               TemplateDumper.represent_ordered_dict)


def write_template(template, filename=None):
    with get_file(OUT_FILE, filename, writable=True) as f:
        f.write(autogen_warning)
        yaml.dump(template, f, TemplateDumper, width=68)


def read_template(template, filename=None):
    with get_file(OUT_FILE, filename) as f:
        return yaml.safe_load(f)


def build_endpoint_map(output_filename=None, input_filename=None):
    if output_filename is not None and output_filename == input_filename:
        raise Exception('Cannot read from and write to the same file')
    config = load_endpoint_data(input_filename)
    template = generate_endpoint_map_template(config)
    write_template(template, output_filename)


def check_up_to_date(output_filename=None, input_filename=None):
    if output_filename is not None and output_filename == input_filename:
        raise Exception('Input and output filenames must be different')
    config = load_endpoint_data(input_filename)
    template = generate_endpoint_map_template(config)
    existing_template = read_template(output_filename)
    return existing_template == template


def get_options():
    import argparse

    parser = argparse.ArgumentParser(
        usage="%(prog)s [-i INPUT_FILE] [-o OUTPUT_FILE] [--check]",
        description=__doc__)
    parser.add_argument('-i', '--input', dest='input_file', action='store',
                        default=None,
                        help='Specify a different endpoint data file')
    parser.add_argument('-o', '--output', dest='output_file', action='store',
                        default=None,
                        help='Specify a different endpoint map template file')
    parser.add_argument('-c', '--check', dest='check', action='store_true',
                        default=False, help='Check that the output file is '
                        'up to date with the data')
    parser.add_argument('-d', '--debug', dest='debug', action='store_true',
                        default=False, help='Print stack traces on error')

    return parser.parse_args()


def main():
    args = get_options()

    try:
        if args.check:
            if not check_up_to_date(args.output_file, args.input_file):
                print('EndpointMap template does not match input data. Please '
                      'run the build_endpoint_map.py tool to update the '
                      'template.', file=sys.stderr)
                sys.exit(2)
        else:
            build_endpoint_map(args.output_file, args.input_file)
    except Exception as exc:
        if args.debug:
            raise
        print('%s: %s' % (type(exc).__name__, str(exc)), file=sys.stderr)
        sys.exit(1)


if __name__ == '__main__':
    main()
@@ -1,353 +0,0 @@
# Data in this file is used to generate the endpoint_map.yaml template.
# Run the script build_endpoint_map.py to regenerate the file.

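# Each top-level key names a service. The Internal/Public/Admin sections give
# the net_param used for the ServiceNetMap lookup (build_endpoint_map.py
# appends 'Network' to it), plus optional uri_suffixes and names overrides;
# port and protocol may be set per endpoint type or once at the service level.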
Aodh:
    Internal:
        net_param: AodhApi
    Public:
        net_param: Public
    Admin:
        net_param: AodhApi
    port: 8042

Barbican:
    Internal:
        net_param: BarbicanApi
    Public:
        net_param: Public
    Admin:
        net_param: BarbicanApi
    port: 9311

Designate:
    Internal:
        net_param: DesignateApi
        uri_suffixes:
            '': /v2
    Public:
        net_param: Public
        uri_suffixes:
            '': /v2
    Admin:
        net_param: DesignateApi
        uri_suffixes:
            '': /v2
    port: 9001

Gnocchi:
    Internal:
        net_param: GnocchiApi
    Public:
        net_param: Public
    Admin:
        net_param: GnocchiApi
    port: 8041

Cinder:
    Internal:
        net_param: CinderApi
        uri_suffixes:
            '': /v1/%(tenant_id)s
            V2: /v2/%(tenant_id)s
            V3: /v3/%(tenant_id)s
    Public:
        net_param: Public
        uri_suffixes:
            '': /v1/%(tenant_id)s
            V2: /v2/%(tenant_id)s
            V3: /v3/%(tenant_id)s
    Admin:
        net_param: CinderApi
        uri_suffixes:
            '': /v1/%(tenant_id)s
            V2: /v2/%(tenant_id)s
            V3: /v3/%(tenant_id)s
    port: 8776

Glance:
    Internal:
        net_param: GlanceApi
    Public:
        net_param: Public
    Admin:
        net_param: GlanceApi
    port: 9292

Mysql:
    Internal:
        net_param: Mysql
        protocol: mysql+pymysql
    port: 3306

Heat:
    Internal:
        net_param: HeatApi
        uri_suffixes:
            '': /v1/%(tenant_id)s
    Public:
        net_param: Public
        uri_suffixes:
            '': /v1/%(tenant_id)s
    Admin:
        net_param: HeatApi
        uri_suffixes:
            '': /v1/%(tenant_id)s
    port: 8004

HeatCfn:
    Internal:
        net_param: HeatApi
        uri_suffixes:
            '': /v1
    Public:
        net_param: Public
        uri_suffixes:
            '': /v1
    Admin:
        net_param: HeatApi
        uri_suffixes:
            '': /v1
    port: 8000

Horizon:
    Public:
        net_param: Public
        uri_suffixes:
            '': /dashboard
    port: 80

# TODO(ayoung): V3 is a temporary fix. Endpoints should be versionless.
# Required for https://bugs.launchpad.net/puppet-nova/+bug/1542486
Keystone:
    Internal:
        net_param: KeystonePublicApi
        uri_suffixes:
            '': /
            V3: /v3
        names:
            EC2: KeystoneEC2
    Public:
        net_param: Public
        uri_suffixes:
            '': /
            V3: /v3
    Admin:
        net_param: KeystoneAdminApi
        uri_suffixes:
            '': /
            V3: /v3
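        # Admin keeps the Keystone admin API port; the service-level default
        # below (5000) applies to the Internal and Public endpoints.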
        port: 35357
    port: 5000

Manila:
    Internal:
        net_param: ManilaApi
        uri_suffixes:
            '': /v2/%(tenant_id)s
            V1: /v1/%(tenant_id)s
    Public:
        net_param: Public
        uri_suffixes:
            '': /v2/%(tenant_id)s
            V1: /v1/%(tenant_id)s
    Admin:
        net_param: ManilaApi
        uri_suffixes:
            '': /v2/%(tenant_id)s
            V1: /v1/%(tenant_id)s
    port: 8786

MetricsQdr:
    Public:
        net_param: Public
    port: 5666
    protocol: amqp

Mistral:
    Internal:
        net_param: MistralApi
        uri_suffixes:
            '': /v2
    Public:
        net_param: Public
        uri_suffixes:
            '': /v2
    Admin:
        net_param: MistralApi
        uri_suffixes:
            '': /v2
    port: 8989

Neutron:
    Internal:
        net_param: NeutronApi
    Public:
        net_param: Public
    Admin:
        net_param: NeutronApi
    port: 9696

Nova:
    Internal:
        net_param: NovaApi
        uri_suffixes:
            '': /v2.1
    Public:
        net_param: Public
        uri_suffixes:
            '': /v2.1
    Admin:
        net_param: NovaApi
        uri_suffixes:
            '': /v2.1
    port: 8774

Placement:
    Internal:
        net_param: Placement
        uri_suffixes:
            '': /placement
    Public:
        net_param: Public
        uri_suffixes:
            '': /placement
    Admin:
        net_param: Placement
        uri_suffixes:
            '': /placement
    port: 8778

NovaVNCProxy:
    Internal:
        net_param: NovaApi
    Public:
        net_param: Public
    Admin:
        net_param: NovaApi
    port: 6080

Swift:
    Internal:
        net_param: SwiftProxy
        uri_suffixes:
            '': /v1/AUTH_%(tenant_id)s
            S3:
    Public:
        net_param: Public
        uri_suffixes:
            '': /v1/AUTH_%(tenant_id)s
            S3:
    Admin:
        net_param: SwiftProxy
        uri_suffixes:
            '':
            S3:
    port: 8080

CephDashboard:
    Internal:
        net_param: CephDashboard
    port: 8444

CephGrafana:
    Internal:
        net_param: CephGrafana
    port: 3100

CephRgw:
    Internal:
        net_param: CephRgw
        uri_suffixes:
            '': /swift/v1/AUTH_%(project_id)s
    Public:
        net_param: Public
        uri_suffixes:
            '': /swift/v1/AUTH_%(project_id)s
    Admin:
        net_param: CephRgw
        uri_suffixes:
            '': /swift/v1/AUTH_%(project_id)s
    port: 8080

Ironic:
    Internal:
        net_param: IronicApi
        uri_suffixes:
            '': /v1
    Public:
        net_param: Public
        uri_suffixes:
            '': /v1
    Admin:
        net_param: IronicApi
        uri_suffixes:
            '': /v1
    port: 6385

IronicInspector:
    Internal:
        net_param: IronicInspector
    Public:
        net_param: Public
    Admin:
        net_param: IronicInspector
    port: 5050

Zaqar:
    Internal:
        net_param: ZaqarApi
    Public:
        net_param: Public
    Admin:
        net_param: ZaqarApi
    port: 8888

ZaqarWebSocket:
    Internal:
        net_param: ZaqarApi
    Public:
        net_param: Public
    Admin:
        net_param: ZaqarApi
    port: 9000
    protocol: ws

Octavia:
    Internal:
        net_param: OctaviaApi
    Public:
        net_param: Public
    Admin:
        net_param: OctaviaApi
    port: 9876

Ganesha:
    Internal:
        net_param: Ganesha
        protocol: nfs
    port: 2049

DockerRegistry:
    Internal:
        net_param: DockerRegistry
    port: 8787

NovaMetadata:
    Internal:
        net_param: NovaMetadata
    port: 8775

Novajoin:
    Internal:
        net_param: Novajoin
        uri_suffixes:
            '': /v1
    Public:
        net_param: Public
        uri_suffixes:
            '': /v1
    Admin:
        net_param: Novajoin
        uri_suffixes:
            '': /v1
    port: 9090