Remove six library
The six library was used to bridge the py2 > py3 gap. This library is no longer required on branches that do not support Python2. Change-Id: Ibebe1ac80342774d47f80f6f5855cd4d064742b6
This commit is contained in:
parent
1cfd09dd1a
commit
d86b063547
@ -12,7 +12,6 @@ oslo.rootwrap>=5.8.0 # Apache-2.0
|
||||
oslo.utils>=3.33.0 # Apache-2.0
|
||||
python-glanceclient>=2.8.0 # Apache-2.0
|
||||
python-ironicclient!=2.5.2,!=2.7.1,!=3.0.0,>=2.3.0 # Apache-2.0
|
||||
six>=1.10.0 # MIT
|
||||
oslo.concurrency>=3.26.0 # Apache-2.0
|
||||
python-ironic-inspector-client>=1.5.0 # Apache-2.0
|
||||
Jinja2>=2.10 # BSD License (3 clause)
|
||||
|
@ -17,9 +17,6 @@
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import six
|
||||
from six import reraise as raise_
|
||||
|
||||
from tripleo_common.i18n import _
|
||||
|
||||
_FATAL_EXCEPTION_FORMAT_ERRORS = False
|
||||
@ -27,7 +24,6 @@ _FATAL_EXCEPTION_FORMAT_ERRORS = False
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@six.python_2_unicode_compatible
|
||||
class TripleoCommonException(Exception):
|
||||
"""Base Tripleo-Common Exception.
|
||||
|
||||
@ -53,7 +49,7 @@ class TripleoCommonException(Exception):
|
||||
{'name': name, 'value': value}) # noqa
|
||||
|
||||
if _FATAL_EXCEPTION_FORMAT_ERRORS:
|
||||
raise_(exc_info[0], exc_info[1], exc_info[2])
|
||||
raise exc_info[1].with_traceback(exc_info[2])
|
||||
|
||||
def __str__(self):
|
||||
return self.message
|
||||
|
@ -17,7 +17,6 @@
|
||||
from concurrent import futures
|
||||
import os
|
||||
import pathlib
|
||||
import six
|
||||
import tenacity
|
||||
|
||||
from oslo_concurrency import processutils
|
||||
@ -264,7 +263,7 @@ class BuildahBuilder(base.BaseBuilder):
|
||||
"""
|
||||
|
||||
self.log.debug("Process deps: {}".format(deps))
|
||||
if isinstance(deps, (six.string_types,)):
|
||||
if isinstance(deps, str):
|
||||
if prio_list:
|
||||
prio_list.append(deps)
|
||||
else:
|
||||
@ -287,7 +286,7 @@ class BuildahBuilder(base.BaseBuilder):
|
||||
dep_list = list()
|
||||
dep_rehash_list = list()
|
||||
for item in deps:
|
||||
if isinstance(item, (six.string_types,)):
|
||||
if isinstance(item, str):
|
||||
dep_list.append(item)
|
||||
else:
|
||||
dep_rehash_list.append(item)
|
||||
|
@ -18,7 +18,6 @@ import abc
|
||||
import logging
|
||||
import os
|
||||
import shlex
|
||||
import six
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
@ -30,8 +29,7 @@ if sys.version_info[0] < 3:
|
||||
open = codecs.open
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class ImageBuilder(object):
|
||||
class ImageBuilder(object, metaclass=abc.ABCMeta):
|
||||
"""Base representation of an image building method"""
|
||||
|
||||
@staticmethod
|
||||
|
@ -15,11 +15,11 @@
|
||||
|
||||
import collections
|
||||
import errno
|
||||
from functools import wraps
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import requests
|
||||
import six
|
||||
import shutil
|
||||
|
||||
from oslo_log import log as logging
|
||||
@ -54,7 +54,7 @@ TYPE_MAP_EXTENSION = '.type-map'
|
||||
|
||||
|
||||
def skip_if_exists(f):
|
||||
@six.wraps(f)
|
||||
@wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
try:
|
||||
return f(*args, **kwargs)
|
||||
|
@ -24,8 +24,7 @@ import requests
|
||||
from requests import auth as requests_auth
|
||||
from requests.adapters import HTTPAdapter
|
||||
import shutil
|
||||
import six
|
||||
from six.moves.urllib import parse
|
||||
from urllib.parse import urlparse
|
||||
import socket
|
||||
import subprocess
|
||||
import tempfile
|
||||
@ -282,7 +281,7 @@ class RegistrySessionHelper(object):
|
||||
and request_response.status_code < 400):
|
||||
return request_response
|
||||
# parse the destination location
|
||||
redir_url = parse.urlparse(request_response.headers['Location'])
|
||||
redir_url = urlparse(request_response.headers['Location'])
|
||||
# close the response since we're going to replace it
|
||||
request_response.close()
|
||||
auth_header = request_session.headers.pop('Authorization', None)
|
||||
@ -494,12 +493,12 @@ class ImageUploadManager(BaseImageManager):
|
||||
if not isinstance(cred_entry, dict) or len(cred_entry) != 1:
|
||||
raise TypeError('Credentials entry must be '
|
||||
'a dict with a single item')
|
||||
if not isinstance(registry, six.string_types):
|
||||
if not isinstance(registry, str):
|
||||
raise TypeError('Key must be a registry host string: %s' %
|
||||
registry)
|
||||
username, password = next(iter(cred_entry.items()))
|
||||
if not (isinstance(username, six.string_types) and
|
||||
isinstance(password, six.string_types)):
|
||||
if not (isinstance(username, str) and
|
||||
isinstance(password, str)):
|
||||
raise TypeError('Username and password must be strings: %s' %
|
||||
username)
|
||||
|
||||
@ -1051,7 +1050,7 @@ class BaseImageUploader(object):
|
||||
def _image_to_url(cls, image):
|
||||
if '://' not in image:
|
||||
image = 'docker://' + image
|
||||
url = parse.urlparse(image)
|
||||
url = urlparse(image)
|
||||
return url
|
||||
|
||||
@classmethod
|
||||
@ -1410,8 +1409,8 @@ class PythonImageUploader(BaseImageUploader):
|
||||
LOG.info('[%s] Starting upload image process' % t.image_name)
|
||||
|
||||
source_local = t.source_image.startswith('containers-storage:')
|
||||
target_image_local_url = parse.urlparse('containers-storage:%s' %
|
||||
t.target_image)
|
||||
target_image_local_url = urlparse('containers-storage:%s' %
|
||||
t.target_image)
|
||||
target_username, target_password = self.credentials_for_registry(
|
||||
t.target_image_url.netloc)
|
||||
try:
|
||||
@ -1447,7 +1446,7 @@ class PythonImageUploader(BaseImageUploader):
|
||||
'container.' % t.image_name)
|
||||
|
||||
try:
|
||||
source_local_url = parse.urlparse(t.source_image)
|
||||
source_local_url = urlparse(t.source_image)
|
||||
# Copy from local storage to target registry
|
||||
self._copy_local_to_registry(
|
||||
source_local_url,
|
||||
@ -1677,7 +1676,7 @@ class PythonImageUploader(BaseImageUploader):
|
||||
image, _, tag = image_url.geturl().rpartition(':')
|
||||
for man in manifest.get('manifests', []):
|
||||
# replace image tag with the manifest hash in the list
|
||||
man_url = parse.urlparse('%s@%s' % (image, man['digest']))
|
||||
man_url = urlparse('%s@%s' % (image, man['digest']))
|
||||
self._collect_manifests_layers(
|
||||
man_url, session, manifests_str, layers,
|
||||
multi_arch=False
|
||||
@ -1808,7 +1807,7 @@ class PythonImageUploader(BaseImageUploader):
|
||||
else:
|
||||
if layer_val and known_path:
|
||||
image_ref = target_url.path.split(':')[0][1:]
|
||||
uploaded = parse.urlparse(known_path).scheme
|
||||
uploaded = urlparse(known_path).scheme
|
||||
cls._track_uploaded_layers(
|
||||
layer_val, known_path=known_path, image_ref=image_ref,
|
||||
scope=('remote' if uploaded else 'local'))
|
||||
@ -2193,7 +2192,7 @@ class PythonImageUploader(BaseImageUploader):
|
||||
else:
|
||||
if layer_val and known_path:
|
||||
image_ref = target_url.path.split(':')[0][1:]
|
||||
uploaded = parse.urlparse(known_path).scheme
|
||||
uploaded = urlparse(known_path).scheme
|
||||
cls._track_uploaded_layers(
|
||||
layer_val, known_path=known_path, image_ref=image_ref,
|
||||
scope=('remote' if uploaded else 'local'))
|
||||
@ -2385,7 +2384,7 @@ class PythonImageUploader(BaseImageUploader):
|
||||
config_digest = manifest['config']['digest']
|
||||
|
||||
config_id = '=' + base64.b64encode(
|
||||
six.b(config_digest)).decode("utf-8")
|
||||
config_digest.encode()).decode('utf-8')
|
||||
config_str = cls._containers_file('overlay-images', image_id,
|
||||
config_id)
|
||||
manifest = cls._get_local_layers_manifest(manifest, config_str)
|
||||
@ -2468,7 +2467,7 @@ class PythonImageUploader(BaseImageUploader):
|
||||
if not image:
|
||||
continue
|
||||
LOG.info('[%s] Removing local copy of image' % image)
|
||||
image_url = parse.urlparse('containers-storage:%s' % image)
|
||||
image_url = urlparse('containers-storage:%s' % image)
|
||||
self._delete(image_url)
|
||||
|
||||
def _get_executor(self):
|
||||
|
@ -19,8 +19,7 @@ import json
|
||||
import os
|
||||
import requests
|
||||
import shutil
|
||||
import six
|
||||
from six.moves.urllib.parse import urlparse
|
||||
from urllib.parse import urlparse
|
||||
import tempfile
|
||||
from unittest import mock
|
||||
import zlib
|
||||
@ -78,7 +77,7 @@ class TestImageExport(base.TestCase):
|
||||
)
|
||||
|
||||
def test_export_stream(self):
|
||||
blob_data = six.b('The Blob')
|
||||
blob_data = b'The Blob'
|
||||
blob_compressed = zlib.compress(blob_data)
|
||||
calc_digest = hashlib.sha256()
|
||||
calc_digest.update(blob_compressed)
|
||||
@ -114,7 +113,7 @@ class TestImageExport(base.TestCase):
|
||||
@mock.patch('tripleo_common.image.image_export.open',
|
||||
side_effect=MemoryError())
|
||||
def test_export_stream_memory_error(self, mock_open):
|
||||
blob_data = six.b('The Blob')
|
||||
blob_data = b'The Blob'
|
||||
blob_compressed = zlib.compress(blob_data)
|
||||
calc_digest = hashlib.sha256()
|
||||
calc_digest.update(blob_compressed)
|
||||
@ -129,7 +128,7 @@ class TestImageExport(base.TestCase):
|
||||
target_url, layer, layer_stream, verify_digest=False)
|
||||
|
||||
def test_export_stream_verify_failed(self):
|
||||
blob_data = six.b('The Blob')
|
||||
blob_data = b'The Blob'
|
||||
blob_compressed = zlib.compress(blob_data)
|
||||
calc_digest = hashlib.sha256()
|
||||
calc_digest.update(blob_compressed)
|
||||
|
@ -19,8 +19,7 @@ import json
|
||||
import operator
|
||||
import requests
|
||||
from requests_mock.contrib import fixture as rm_fixture
|
||||
import six
|
||||
from six.moves.urllib.parse import urlparse
|
||||
from urllib.parse import urlparse
|
||||
from unittest import mock
|
||||
import zlib
|
||||
|
||||
@ -32,7 +31,7 @@ from tripleo_common.tests import base
|
||||
from tripleo_common.tests.image import fakes
|
||||
|
||||
|
||||
filedata = six.u(
|
||||
filedata = str(
|
||||
"""container_images:
|
||||
- imagename: docker.io/tripleomaster/heat-docker-agents-centos:latest
|
||||
push_destination: localhost:8787
|
||||
@ -1884,7 +1883,7 @@ class TestPythonImageUploader(base.TestCase):
|
||||
source_session = requests.Session()
|
||||
target_session = requests.Session()
|
||||
|
||||
blob_data = six.b('The Blob')
|
||||
blob_data = b'The Blob'
|
||||
calc_digest = hashlib.sha256()
|
||||
calc_digest.update(blob_data)
|
||||
blob_digest = 'sha256:' + calc_digest.hexdigest()
|
||||
@ -2237,8 +2236,8 @@ class TestPythonImageUploader(base.TestCase):
|
||||
def test_copy_registry_to_local(self, mock_popen, mock_environ):
|
||||
mock_success = mock.Mock()
|
||||
mock_success.communicate.return_value = (
|
||||
six.b('pull complete'),
|
||||
six.b('')
|
||||
b'pull complete',
|
||||
b''
|
||||
)
|
||||
mock_success.returncode = 0
|
||||
|
||||
@ -2274,7 +2273,7 @@ class TestPythonImageUploader(base.TestCase):
|
||||
layer = {'digest': 'sha256:aaaa'}
|
||||
target_session = requests.Session()
|
||||
|
||||
blob_data = six.b('The Blob')
|
||||
blob_data = b'The Blob'
|
||||
calc_digest = hashlib.sha256()
|
||||
calc_digest.update(blob_data)
|
||||
blob_digest = 'sha256:' + calc_digest.hexdigest()
|
||||
|
@ -14,7 +14,6 @@
|
||||
#
|
||||
|
||||
import os
|
||||
import six
|
||||
import sys
|
||||
import tempfile
|
||||
from unittest import mock
|
||||
@ -41,7 +40,7 @@ KB_DEFAULT_PREFIX = kb.CONTAINER_IMAGES_DEFAULTS['name_prefix']
|
||||
KB_DEFAULT_NAMESPACE = kb.CONTAINER_IMAGES_DEFAULTS['namespace']
|
||||
|
||||
|
||||
filedata = six.u("""container_images:
|
||||
filedata = str("""container_images:
|
||||
- imagename: docker.io/tripleomaster/heat-docker-agents-centos:latest
|
||||
image_source: kolla
|
||||
push_destination: localhost:8787
|
||||
@ -60,7 +59,7 @@ filedata = six.u("""container_images:
|
||||
push_destination: localhost:8787
|
||||
""")
|
||||
|
||||
template_filedata = six.u("""
|
||||
template_filedata = str("""
|
||||
container_images_template:
|
||||
- imagename: "{{namespace}}/heat-docker-agents-centos:latest"
|
||||
image_source: kolla
|
||||
|
@ -15,7 +15,7 @@
|
||||
|
||||
import json
|
||||
import os
|
||||
from six.moves import configparser
|
||||
import configparser
|
||||
import shutil
|
||||
import tempfile
|
||||
from unittest import mock
|
||||
|
@ -13,8 +13,6 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from six import iteritems
|
||||
|
||||
from heatclient.common import template_utils
|
||||
|
||||
from tripleo_common import constants
|
||||
@ -36,7 +34,7 @@ def search_stack(stack_data, key_name):
|
||||
if result:
|
||||
return result
|
||||
elif isinstance(stack_data, dict):
|
||||
for k, v in iteritems(stack_data):
|
||||
for k, v in stack_data.items():
|
||||
if k == key_name:
|
||||
return v
|
||||
result = search_stack(v, key_name)
|
||||
|
@ -13,15 +13,14 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from datetime import datetime
|
||||
from io import StringIO
|
||||
import json
|
||||
import logging
|
||||
import multiprocessing
|
||||
import os
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import six
|
||||
from six.moves import configparser
|
||||
from six.moves import cStringIO as StringIO
|
||||
import configparser
|
||||
import tempfile
|
||||
import yaml
|
||||
|
||||
@ -165,10 +164,10 @@ def _get_inventory(inventory, work_dir):
|
||||
if not inventory:
|
||||
return None
|
||||
|
||||
if (isinstance(inventory, six.string_types) and
|
||||
if (isinstance(inventory, str) and
|
||||
os.path.exists(inventory)):
|
||||
return inventory
|
||||
if not isinstance(inventory, six.string_types):
|
||||
if not isinstance(inventory, str):
|
||||
inventory = yaml.safe_dump(inventory)
|
||||
|
||||
path = os.path.join(work_dir, 'inventory.yaml')
|
||||
@ -183,7 +182,7 @@ def _get_ssh_private_key(ssh_private_key, work_dir):
|
||||
if not ssh_private_key:
|
||||
return None
|
||||
|
||||
if (isinstance(ssh_private_key, six.string_types) and
|
||||
if (isinstance(ssh_private_key, str) and
|
||||
os.path.exists(ssh_private_key)):
|
||||
os.chmod(ssh_private_key, 0o600)
|
||||
return ssh_private_key
|
||||
@ -201,10 +200,10 @@ def _get_playbook(playbook, work_dir):
|
||||
if not playbook:
|
||||
return None
|
||||
|
||||
if (isinstance(playbook, six.string_types) and
|
||||
if (isinstance(playbook, str) and
|
||||
os.path.exists(playbook)):
|
||||
return playbook
|
||||
if not isinstance(playbook, six.string_types):
|
||||
if not isinstance(playbook, str):
|
||||
playbook = yaml.safe_dump(playbook)
|
||||
|
||||
path = os.path.join(work_dir, 'playbook.yaml')
|
||||
@ -295,7 +294,7 @@ def run_ansible_playbook(playbook, work_dir=None, **kwargs):
|
||||
msg = "extra_env_variables must be a dict"
|
||||
raise RuntimeError(msg)
|
||||
for key, value in extra_env_variables.items():
|
||||
extra_env_variables[key] = six.text_type(value)
|
||||
extra_env_variables[key] = str(value)
|
||||
|
||||
try:
|
||||
ansible_config_path = write_default_ansible_cfg(
|
||||
@ -320,7 +319,7 @@ def run_ansible_playbook(playbook, work_dir=None, **kwargs):
|
||||
'tripleo_dense,tripleo_profile_tasks,tripleo_states',
|
||||
'ANSIBLE_STDOUT_CALLBACK': 'tripleo_dense',
|
||||
'PROFILE_TASKS_TASK_OUTPUT_LIMIT':
|
||||
six.text_type(profile_tasks_limit),
|
||||
str(profile_tasks_limit),
|
||||
})
|
||||
|
||||
if extra_env_variables:
|
||||
|
@ -18,7 +18,6 @@ import logging
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import six
|
||||
import tempfile
|
||||
import warnings
|
||||
import yaml
|
||||
@ -136,7 +135,7 @@ class Config(object):
|
||||
# Filter out boolean value and remove blanks
|
||||
flatten_when = "".join([re.sub(r'\s+', '', x)
|
||||
for x in whenexpr
|
||||
if isinstance(x, six.string_types)])
|
||||
if isinstance(x, str)])
|
||||
# make \|int optional incase forgotten; use only step digit:
|
||||
# ()'s around step|int are also optional
|
||||
steps_found = re.findall(r'\(?step(?:\|int)?\)?==(\d+)',
|
||||
@ -262,7 +261,7 @@ class Config(object):
|
||||
role_data = self.stack_outputs.get('RoleData', {})
|
||||
role_group_vars = self.stack_outputs.get('RoleGroupVars', {})
|
||||
role_host_vars = self.stack_outputs.get('AnsibleHostVarsMap', {})
|
||||
for role_name, role in six.iteritems(role_data):
|
||||
for role_name, role in role_data.items():
|
||||
role_path = os.path.join(config_dir, role_name)
|
||||
self._mkdir(role_path)
|
||||
for config in config_type or role.keys():
|
||||
@ -304,7 +303,7 @@ class Config(object):
|
||||
default_flow_style=False)
|
||||
|
||||
role_config = self.get_role_config()
|
||||
for config_name, config in six.iteritems(role_config):
|
||||
for config_name, config in role_config.items():
|
||||
|
||||
# External tasks are in RoleConfig and not defined per role.
|
||||
# So we don't use the RoleData to create the per step playbooks.
|
||||
@ -512,8 +511,8 @@ class Config(object):
|
||||
|
||||
# If the value is not a string already, pretty print it as a
|
||||
# string so it's rendered in a readable format.
|
||||
if not (isinstance(data, six.text_type) or
|
||||
isinstance(data, six.string_types)):
|
||||
if not isinstance(data, str):
|
||||
|
||||
data = json.dumps(data, indent=2)
|
||||
|
||||
d['config'] = data
|
||||
|
@ -14,8 +14,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import six
|
||||
|
||||
from keystoneauth1.identity.generic import Token as IdentityToken
|
||||
from keystoneauth1 import loading
|
||||
from keystoneauth1 import session as ks_session
|
||||
@ -220,7 +218,7 @@ def get_endpoint_for_project(ctx, service_name=None, service_type=None,
|
||||
endpoint = None
|
||||
os_actions_endpoint_type = 'public'
|
||||
|
||||
for endpoints in six.itervalues(service_endpoints):
|
||||
for endpoints in service_endpoints.values():
|
||||
for ep in endpoints:
|
||||
# is V3 interface?
|
||||
if 'interface' in ep:
|
||||
|
@ -18,7 +18,6 @@ import logging
|
||||
import re
|
||||
|
||||
from oslo_utils import netutils
|
||||
import six
|
||||
|
||||
from ironicclient import exceptions as ironicexceptions
|
||||
from oslo_concurrency import processutils
|
||||
@ -417,7 +416,7 @@ def register_ironic_node(node, client):
|
||||
|
||||
for field in ('name', 'uuid'):
|
||||
if field in node:
|
||||
create_map.update({field: six.text_type(node[field])})
|
||||
create_map.update({field: str(node[field])})
|
||||
|
||||
conductor_group = node.get("conductor_group")
|
||||
if conductor_group:
|
||||
@ -518,7 +517,7 @@ def _update_or_register_ironic_node(node, node_map, client):
|
||||
|
||||
patched = {}
|
||||
for field, paths in _NON_DRIVER_FIELDS.items():
|
||||
if isinstance(paths, six.string_types):
|
||||
if isinstance(paths, str):
|
||||
paths = [paths]
|
||||
|
||||
if field in node:
|
||||
@ -679,7 +678,7 @@ def validate_nodes(nodes_list):
|
||||
|
||||
def dict_to_capabilities(caps_dict):
|
||||
"""Convert a dictionary into a string with the capabilities syntax."""
|
||||
if isinstance(caps_dict, six.string_types):
|
||||
if isinstance(caps_dict, str):
|
||||
return caps_dict
|
||||
|
||||
# NOTE(dtantsur): sort capabilities so that their order does not change
|
||||
|
@ -13,8 +13,7 @@
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
|
||||
from six.moves import urllib
|
||||
import urllib
|
||||
|
||||
from tripleo_common import constants
|
||||
from tripleo_common.utils import common as common_utils
|
||||
|
@ -25,7 +25,6 @@ import time
|
||||
import uuid
|
||||
|
||||
import passlib.pwd
|
||||
import six
|
||||
import yaml
|
||||
|
||||
from tripleo_common import constants
|
||||
@ -64,7 +63,7 @@ def generate_passwords(stack_env=None,
|
||||
passwords[name] = create_cephx_key()
|
||||
elif name == "CephClusterFSID":
|
||||
# The FSID must be a UUID
|
||||
passwords[name] = six.text_type(uuid.uuid4())
|
||||
passwords[name] = str(uuid.uuid4())
|
||||
# Since by default passlib.pwd.genword uses all digits and ascii upper
|
||||
# & lowercase letters, it provides ~5.95 entropy per character.
|
||||
# Make the length of the default authkey 4096 bytes, which should give
|
||||
|
@ -13,14 +13,12 @@
|
||||
# under the License.
|
||||
#
|
||||
|
||||
from io import StringIO
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import yaml
|
||||
|
||||
import six
|
||||
from six.moves import cStringIO as StringIO
|
||||
|
||||
from tripleo_common.exception import NotFound
|
||||
from tripleo_common.exception import RoleMetadataError
|
||||
|
||||
@ -156,14 +154,14 @@ def validate_role_yaml(role_data=None, role_path=None):
|
||||
raise RoleMetadataError('Unable to parse role yaml')
|
||||
|
||||
schema = {
|
||||
'name': {'type': six.string_types},
|
||||
'name': {'type': str},
|
||||
'CountDefault': {'type': int},
|
||||
'HostnameFormatDefault': {'type': six.string_types},
|
||||
'HostnameFormatDefault': {'type': str},
|
||||
'disable_constraints': {'type': bool}, # TODO(sbaker) remove in U
|
||||
'upgrade_batch_size': {'type': int},
|
||||
'ServicesDefault': {'type': list},
|
||||
'tags': {'type': list},
|
||||
'description': {'type': six.string_types},
|
||||
'description': {'type': str},
|
||||
'networks': {'type': [list, dict]},
|
||||
'networks_skip_config': {'type': list},
|
||||
}
|
||||
|
Loading…
x
Reference in New Issue
Block a user