trivial: Run some files through black

Before major surgery.

Change-Id: I367bf6df230262858d1d414afed4a43ccbdce72f
Signed-off-by: Stephen Finucane <stephenfin@redhat.com>
commit 9e87611aec (parent: da90cdb7a9)
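Everything below is mechanical output from the black code formatter; no behaviour change is intended. As a minimal sketch of what that entails (assumptions: black is installed and a 79-column line length is used to match the flake8 settings in tox.ini; the exact black version and configuration are not recorded in this commit), one of the rewrites in the diff can be reproduced through black's Python API:

    # Minimal sketch: reproduce black's rewriting of one condition from
    # this diff. Assumption: 79-column line length.
    import black

    SRC = (
        "if (strict and actual is not None and not\n"
        "        isinstance(actual, resource.Resource)):\n"
        "    raise ValueError('A %s must be passed' % expected.__name__)\n"
    )

    # format_str only parses the code, so the free names (strict, actual,
    # resource, expected) do not need to resolve to anything.
    print(black.format_str(SRC, mode=black.Mode(line_length=79)))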
@@ -16,6 +16,7 @@ from urllib.parse import urlparse

try:
    import simplejson

    JSONDecodeError = simplejson.scanner.JSONDecodeError
except ImportError:
    JSONDecodeError = ValueError

@@ -38,16 +39,24 @@ from openstack import resource

def _check_resource(strict=False):
    def wrap(method):
        def check(self, expected, actual=None, *args, **kwargs):
            if (strict and actual is not None and not
                    isinstance(actual, resource.Resource)):
            if (
                strict
                and actual is not None
                and not isinstance(actual, resource.Resource)
            ):
                raise ValueError("A %s must be passed" % expected.__name__)
            elif (isinstance(actual, resource.Resource) and not
                    isinstance(actual, expected)):
                raise ValueError("Expected %s but received %s" % (
                    expected.__name__, actual.__class__.__name__))
            elif isinstance(actual, resource.Resource) and not isinstance(
                actual, expected
            ):
                raise ValueError(
                    "Expected %s but received %s"
                    % (expected.__name__, actual.__class__.__name__)
                )

            return method(self, expected, actual, *args, **kwargs)

        return check

    return wrap

@@ -70,14 +79,20 @@ class Proxy(adapter.Adapter):

    def __init__(
        self,
        session,
        statsd_client=None, statsd_prefix=None,
        prometheus_counter=None, prometheus_histogram=None,
        influxdb_config=None, influxdb_client=None,
        *args, **kwargs):
        statsd_client=None,
        statsd_prefix=None,
        prometheus_counter=None,
        prometheus_histogram=None,
        influxdb_config=None,
        influxdb_client=None,
        *args,
        **kwargs
    ):
        # NOTE(dtantsur): keystoneauth defaults retriable_status_codes to None,
        # override it with a class-level value.
        kwargs.setdefault('retriable_status_codes',
                          self.retriable_status_codes)
        kwargs.setdefault(
            'retriable_status_codes', self.retriable_status_codes
        )
        super(Proxy, self).__init__(session=session, *args, **kwargs)
        self._statsd_client = statsd_client
        self._statsd_prefix = statsd_prefix

@@ -94,12 +109,10 @@ class Proxy(adapter.Adapter):

    def _get_cache_key_prefix(self, url):
        """Calculate cache prefix for the url"""
        name_parts = self._extract_name(
            url, self.service_type,
            self.session.get_project_id())
            url, self.service_type, self.session.get_project_id()
        )

        return '.'.join(
            [self.service_type]
            + name_parts)
        return '.'.join([self.service_type] + name_parts)

    def _invalidate_cache(self, conn, key_prefix):
        """Invalidate all cache entries starting with given prefix"""

@@ -109,10 +122,16 @@ class Proxy(adapter.Adapter):

                conn._api_cache_keys.remove(k)

    def request(
        self, url, method, error_message=None,
        raise_exc=False, connect_retries=1,
        self,
        url,
        method,
        error_message=None,
        raise_exc=False,
        connect_retries=1,
        global_request_id=None,
        *args, **kwargs):
        *args,
        **kwargs
    ):
        conn = self._get_connection()
        if not global_request_id:
            # Per-request setting should take precedence
@@ -125,9 +144,7 @@ class Proxy(adapter.Adapter):

        if conn.cache_enabled:
            # Construct cache key. It consists of:
            # service.name_parts.URL.str(kwargs)
            key = '.'.join(
                [key_prefix, url, str(kwargs)]
            )
            key = '.'.join([key_prefix, url, str(kwargs)])

            # Track cache key for invalidating possibility
            conn._api_cache_keys.add(key)

@@ -137,7 +154,8 @@ class Proxy(adapter.Adapter):

            # Get the object expiration time from config
            # default to 0 to disable caching for this resource type
            expiration_time = int(
                conn._cache_expirations.get(key_prefix, 0))
                conn._cache_expirations.get(key_prefix, 0)
            )
            # Get from cache or execute and cache
            response = conn._cache.get_or_create(
                key=key,

@@ -149,9 +167,9 @@ class Proxy(adapter.Adapter):

                        raise_exc=raise_exc,
                        global_request_id=global_request_id,
                        **kwargs
                    )
                    ),
                    expiration_time=expiration_time
                    ),
                    expiration_time=expiration_time,
                )
            else:
                # invalidate cache if we send modification request or user

@@ -159,10 +177,13 @@ class Proxy(adapter.Adapter):

                self._invalidate_cache(conn, key_prefix)
            # Pass through the API request bypassing cache
            response = super(Proxy, self).request(
                url, method,
                connect_retries=connect_retries, raise_exc=raise_exc,
                url,
                method,
                connect_retries=connect_retries,
                raise_exc=raise_exc,
                global_request_id=global_request_id,
                **kwargs)
                **kwargs
            )

        for h in response.history:
            self._report_stats(h)
@@ -178,20 +199,20 @@ class Proxy(adapter.Adapter):

    @functools.lru_cache(maxsize=256)
    def _extract_name(self, url, service_type=None, project_id=None):
        '''Produce a key name to use in logging/metrics from the URL path.
        """Produce a key name to use in logging/metrics from the URL path.

        We want to be able to logic/metric sane general things, so we pull
        the url apart to generate names. The function returns a list because
        there are two different ways in which the elements want to be combined
        below (one for logging, one for statsd)

        Some examples are likely useful:
        Some examples are likely useful::

        /servers -> ['servers']
        /servers/{id} -> ['server']
        /servers/{id}/os-security-groups -> ['server', 'os-security-groups']
        /v2.0/networks.json -> ['networks']
        '''
        """

        url_path = urllib.parse.urlparse(url).path.strip()
        # Remove / from the beginning to keep the list indexes of interesting

@@ -201,16 +222,19 @@ class Proxy(adapter.Adapter):

        # Special case for neutron, which puts .json on the end of urls
        if url_path.endswith('.json'):
            url_path = url_path[:-len('.json')]
            url_path = url_path[: -len('.json')]

        # Split url into parts and exclude potential project_id in some urls
        url_parts = [
            x for x in url_path.split('/') if (
            x
            for x in url_path.split('/')
            if (
                x != project_id
                and (
                    not project_id
                    or (project_id and x != 'AUTH_' + project_id)
                ))
                )
            )
        ]
        if url_parts[-1] == 'detail':
            # Special case detail calls

@@ -221,9 +245,12 @@ class Proxy(adapter.Adapter):

        # Strip leading version piece so that
        # GET /v2.0/networks
        # returns ['networks']
        if (url_parts[0]
        if (
            url_parts[0]
            and url_parts[0][0] == 'v'
            and url_parts[0][1] and url_parts[0][1].isdigit()):
            and url_parts[0][1]
            and url_parts[0][1].isdigit()
        ):
            url_parts = url_parts[1:]
        name_parts = self._extract_name_consume_url_parts(url_parts)

@@ -242,19 +269,21 @@ class Proxy(adapter.Adapter):

        return [part for part in name_parts if part]

    def _extract_name_consume_url_parts(self, url_parts):
        """Pull out every other URL portion - so that
        GET /servers/{id}/os-security-groups
        returns ['server', 'os-security-groups']
        """Pull out every other URL portion.

        For example, ``GET /servers/{id}/os-security-groups`` returns
        ``['server', 'os-security-groups']``.
        """
        name_parts = []
        for idx in range(0, len(url_parts)):
            if not idx % 2 and url_parts[idx]:
                # If we are on first segment and it end with 's' stip this 's'
                # to differentiate LIST and GET_BY_ID
                if (len(url_parts) > idx + 1
                if (
                    len(url_parts) > idx + 1
                    and url_parts[idx][-1] == 's'
                    and url_parts[idx][-2:] != 'is'):
                    and url_parts[idx][-2:] != 'is'
                ):
                    name_parts.append(url_parts[idx][:-1])
                else:
                    name_parts.append(url_parts[idx])
@@ -276,15 +305,19 @@ class Proxy(adapter.Adapter):

        if response is not None and not method:
            method = response.request.method
        name_parts = [
            normalize_metric_name(f) for f in
            self._extract_name(
                url, self.service_type, self.session.get_project_id())
            normalize_metric_name(f)
            for f in self._extract_name(
                url, self.service_type, self.session.get_project_id()
            )
        ]
        key = '.'.join(
            [self._statsd_prefix,
             normalize_metric_name(self.service_type), method,
             '_'.join(name_parts)
             ])
            [
                self._statsd_prefix,
                normalize_metric_name(self.service_type),
                method,
                '_'.join(name_parts),
            ]
        )
        with self._statsd_client.pipeline() as pipe:
            if response is not None:
                duration = int(response.elapsed.total_seconds() * 1000)

@@ -300,15 +333,17 @@ class Proxy(adapter.Adapter):

            # We do not want errors in metric reporting ever break client
            self.log.exception("Exception reporting metrics")

    def _report_stats_prometheus(self, response, url=None, method=None,
                                 exc=None):
    def _report_stats_prometheus(
        self, response, url=None, method=None, exc=None
    ):
        if response is not None and not url:
            url = response.request.url
        if response is not None and not method:
            method = response.request.method
        parsed_url = urlparse(url)
        endpoint = "{}://{}{}".format(
            parsed_url.scheme, parsed_url.netloc, parsed_url.path)
            parsed_url.scheme, parsed_url.netloc, parsed_url.path
        )
        if response is not None:
            labels = dict(
                method=method,

@@ -318,10 +353,12 @@ class Proxy(adapter.Adapter):

            )
            self._prometheus_counter.labels(**labels).inc()
            self._prometheus_histogram.labels(**labels).observe(
                response.elapsed.total_seconds() * 1000)
                response.elapsed.total_seconds() * 1000
            )

    def _report_stats_influxdb(self, response, url=None, method=None,
                               exc=None):
    def _report_stats_influxdb(
        self, response, url=None, method=None, exc=None
    ):
        # NOTE(gtema): status_code is saved both as tag and field to give
        # ability showing it as a value and not only as a legend.
        # However Influx is not ok with having same name in tags and fields,

@@ -332,16 +369,16 @@ class Proxy(adapter.Adapter):

            method = response.request.method
        tags = dict(
            method=method,
            name='_'.join([
                normalize_metric_name(f) for f in
                self._extract_name(
                    url, self.service_type,
                    self.session.get_project_id())
            ])
            name='_'.join(
                [
                    normalize_metric_name(f)
                    for f in self._extract_name(
                        url, self.service_type, self.session.get_project_id()
                    )
        fields = dict(
            attempted=1
                ]
            ),
        )
        fields = dict(attempted=1)
        if response is not None:
            fields['duration'] = int(response.elapsed.total_seconds() * 1000)
            tags['status_code'] = str(response.status_code)

@@ -356,16 +393,14 @@ class Proxy(adapter.Adapter):

            fields['failed'] = 1
        if 'additional_metric_tags' in self._influxdb_config:
            tags.update(self._influxdb_config['additional_metric_tags'])
        measurement = self._influxdb_config.get(
            'measurement', 'openstack_api') \
            if self._influxdb_config else 'openstack_api'
        measurement = (
            self._influxdb_config.get('measurement', 'openstack_api')
            if self._influxdb_config
            else 'openstack_api'
        )
        # Note(gtema) append service name into the measurement name
        measurement = '%s.%s' % (measurement, self.service_type)
        data = [dict(
            measurement=measurement,
            tags=tags,
            fields=fields
        )]
        data = [dict(measurement=measurement, tags=tags, fields=fields)]
        try:
            self._influxdb_client.write_points(data)
        except Exception:
@@ -385,8 +420,8 @@ class Proxy(adapter.Adapter):

            directly on ourselves. Use one of them.
        """
        return getattr(
            self, '_connection', getattr(
                self.session, '_sdk_connection', None))
            self, '_connection', getattr(self.session, '_sdk_connection', None)
        )

    def _get_resource(self, resource_type, value, **attrs):
        """Get a resource object to work on

@@ -404,15 +439,14 @@ class Proxy(adapter.Adapter):

        if value is None:
            # Create a bare resource
            res = resource_type.new(connection=conn, **attrs)
        elif (isinstance(value, dict)
              and not isinstance(value, resource.Resource)):
            res = resource_type._from_munch(
                value, connection=conn)
        elif isinstance(value, dict) and not isinstance(
            value, resource.Resource
        ):
            res = resource_type._from_munch(value, connection=conn)
            res._update(**attrs)
        elif not isinstance(value, resource_type):
            # Create from an ID
            res = resource_type.new(
                id=value, connection=conn, **attrs)
            res = resource_type.new(id=value, connection=conn, **attrs)
        else:
            # An existing resource instance
            res = value

@@ -435,8 +469,7 @@ class Proxy(adapter.Adapter):

        value = resource.Resource._get_id(parent)
        return value

    def _find(self, resource_type, name_or_id, ignore_missing=True,
              **attrs):
    def _find(self, resource_type, name_or_id, ignore_missing=True, **attrs):
        """Find a resource

        :param name_or_id: The name or ID of a resource to find.

@@ -451,9 +484,9 @@ class Proxy(adapter.Adapter):

        :returns: An instance of ``resource_type`` or None
        """
        return resource_type.find(self, name_or_id,
                                  ignore_missing=ignore_missing,
                                  **attrs)
        return resource_type.find(
            self, name_or_id, ignore_missing=ignore_missing, **attrs
        )

    # TODO(stephenfin): Update docstring for attrs since it's a lie
    @_check_resource(strict=False)

@@ -565,8 +598,15 @@ class Proxy(adapter.Adapter):

        return resource_type.bulk_create(self, data, base_path=base_path)

    @_check_resource(strict=False)
    def _get(self, resource_type, value=None, requires_id=True,
             base_path=None, skip_cache=False, **attrs):
    def _get(
        self,
        resource_type,
        value=None,
        requires_id=True,
        base_path=None,
        skip_cache=False,
        **attrs
    ):
        """Fetch a resource

        :param resource_type: The type of resource to get.

@@ -592,13 +632,16 @@ class Proxy(adapter.Adapter):

        res = self._get_resource(resource_type, value, **attrs)

        return res.fetch(
            self, requires_id=requires_id, base_path=base_path,
            self,
            requires_id=requires_id,
            base_path=base_path,
            skip_cache=skip_cache,
            error_message="No {resource_type} found for {value}".format(
                resource_type=resource_type.__name__, value=value))
                resource_type=resource_type.__name__, value=value
            ),
        )

    def _list(self, resource_type,
              paginated=True, base_path=None, **attrs):
    def _list(self, resource_type, paginated=True, base_path=None, **attrs):
        """List a resource

        :param resource_type: The type of resource to list. This should

@@ -622,9 +665,8 @@ class Proxy(adapter.Adapter):

            the ``resource_type``.
        """
        return resource_type.list(
            self, paginated=paginated,
            base_path=base_path,
            **attrs)
            self, paginated=paginated, base_path=base_path, **attrs
        )

    def _head(self, resource_type, value=None, base_path=None, **attrs):
        """Retrieve a resource's header
@@ -652,34 +694,43 @@ class Proxy(adapter.Adapter):

    def _get_cleanup_dependencies(self):
        return None

    def _service_cleanup(self, dry_run=True, client_status_queue=None,
                         identified_resources=None, filters=None,
                         resource_evaluation_fn=None):
        return None

    def _service_cleanup_del_res(self, del_fn, obj, dry_run=True,
    def _service_cleanup(
        self,
        dry_run=True,
        client_status_queue=None,
        identified_resources=None,
        filters=None,
        resource_evaluation_fn=None):
        resource_evaluation_fn=None,
    ):
        return None

    def _service_cleanup_del_res(
        self,
        del_fn,
        obj,
        dry_run=True,
        client_status_queue=None,
        identified_resources=None,
        filters=None,
        resource_evaluation_fn=None,
    ):
        need_delete = False
        try:
            if (
                resource_evaluation_fn
                and callable(resource_evaluation_fn)
            ):
            if resource_evaluation_fn and callable(resource_evaluation_fn):
                # Ask a user-provided evaluation function if we need to delete
                # the resource
                need_del = resource_evaluation_fn(obj, filters,
                                                  identified_resources)
                need_del = resource_evaluation_fn(
                    obj, filters, identified_resources
                )
                if isinstance(need_del, bool):
                    # Just double check function returned bool
                    need_delete = need_del
            else:
                need_delete = \
                need_delete = (
                    self._service_cleanup_resource_filters_evaluation(
                        obj,
                        filters=filters)
                        obj, filters=filters
                    )
                )

            if need_delete:
                if client_status_queue:

@@ -716,8 +767,10 @@ class Proxy(adapter.Adapter):

                    else:
                        # There are filters set, but we can't get required
                        # attribute, so skip the resource
                        self.log.debug('Requested cleanup attribute %s is not '
                                       'available on the resource' % k)
                        self.log.debug(
                            'Requested cleanup attribute %s is not '
                            'available on the resource' % k
                        )
                        part_cond.append(False)
                except Exception:
                    self.log.exception('Error during condition evaluation')
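Aside: the `_extract_name` docstring in the proxy.py hunks above documents the URL-to-metric-name mapping. As a rough, self-contained sketch of that mapping (an illustration, not the project's code: project_id filtering and the neutron `.json` special case are omitted), it behaves like the docstring examples:

    # Sketch of the mapping documented in _extract_name's docstring.
    def extract_name(url_path):
        parts = [p for p in url_path.split('/') if p]
        # Drop a leading version segment such as 'v2.0'.
        if parts and parts[0][:1] == 'v' and parts[0][1:2].isdigit():
            parts = parts[1:]
        name_parts = []
        for idx in range(0, len(parts)):
            if not idx % 2 and parts[idx]:
                # Singularize non-final segments ('servers' -> 'server')
                # to tell GET-by-ID apart from LIST.
                if (
                    len(parts) > idx + 1
                    and parts[idx][-1] == 's'
                    and parts[idx][-2:] != 'is'
                ):
                    name_parts.append(parts[idx][:-1])
                else:
                    name_parts.append(parts[idx])
        return name_parts

    assert extract_name('/servers') == ['servers']
    assert extract_name('/servers/{id}') == ['server']
    assert extract_name('/servers/{id}/os-security-groups') == [
        'server', 'os-security-groups'
    ]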
@@ -102,10 +102,20 @@ class _BaseComponent:

    # only once when the attribute is retrieved in the code.
    already_warned_deprecation = False

    def __init__(self, name, type=None, default=None, alias=None, aka=None,
                 alternate_id=False, list_type=None, coerce_to_default=False,
                 deprecated=False, deprecation_reason=None,
                 **kwargs):
    def __init__(
        self,
        name,
        type=None,
        default=None,
        alias=None,
        aka=None,
        alternate_id=False,
        list_type=None,
        coerce_to_default=False,
        deprecated=False,
        deprecation_reason=None,
        **kwargs,
    ):
        """A typed descriptor for a component that makes up a Resource

        :param name: The name this component exists as on the server

@@ -196,8 +206,11 @@ class _BaseComponent:

        if value and deprecated and not self.already_warned_deprecation:
            self.already_warned_deprecation = True
            if not deprecate_reason:
                LOG.warning("The option [%s] has been deprecated. "
                            "Please avoid using it.", self.name)
                LOG.warning(
                    "The option [%s] has been deprecated. "
                    "Please avoid using it.",
                    self.name,
                )
            else:
                LOG.warning(deprecate_reason)
        return value

@@ -279,8 +292,9 @@ class _ComponentManager(collections.abc.MutableMapping):

    @property
    def dirty(self):
        """Return a dict of modified attributes"""
        return dict((key, self.attributes.get(key, None))
                    for key in self._dirty)
        return dict(
            (key, self.attributes.get(key, None)) for key in self._dirty
        )

    def clean(self, only=None):
        """Signal that the resource no longer has modified attributes.

@@ -304,7 +318,6 @@ class _Request:


class QueryParameters:

    def __init__(self, *names, **mappings):
        """Create a dict of accepted query parameters

@@ -342,7 +355,8 @@ class QueryParameters:

        expected_params = list(self._mapping.keys())
        expected_params.extend(
            value.get('name', key) if isinstance(value, dict) else value
            for key, value in self._mapping.items())
            for key, value in self._mapping.items()
        )

        if base_path:
            expected_params += utils.get_string_format_keys(base_path)

@@ -353,12 +367,14 @@ class QueryParameters:

        else:
            if not allow_unknown_params:
                raise exceptions.InvalidResourceQuery(
                    message="Invalid query params: %s" %
                    ",".join(invalid_keys),
                    extra_data=invalid_keys)
                    message="Invalid query params: %s"
                    % ",".join(invalid_keys),
                    extra_data=invalid_keys,
                )
            else:
                known_keys = set(query.keys()).intersection(
                    set(expected_params))
                    set(expected_params)
                )
                return {k: query[k] for k in known_keys}

    def _transpose(self, query, resource_type):

@@ -385,7 +401,8 @@ class QueryParameters:

            # single-argument (like int) and double-argument type functions.
            try:
                provide_resource_type = (
                    len(inspect.getfullargspec(type_).args) > 1)
                    len(inspect.getfullargspec(type_).args) > 1
                )
            except TypeError:
                provide_resource_type = False
@@ -522,26 +539,24 @@ class Resource(dict):

            self._unknown_attrs_in_body.update(attrs)

        self._body = _ComponentManager(
            attributes=body,
            synchronized=_synchronized)
            attributes=body, synchronized=_synchronized
        )
        self._header = _ComponentManager(
            attributes=header,
            synchronized=_synchronized)
            attributes=header, synchronized=_synchronized
        )
        self._uri = _ComponentManager(
            attributes=uri,
            synchronized=_synchronized)
            attributes=uri, synchronized=_synchronized
        )
        self._computed = _ComponentManager(
            attributes=computed,
            synchronized=_synchronized)
            attributes=computed, synchronized=_synchronized
        )
        if self.commit_jsonpatch or self.allow_patch:
            # We need the original body to compare against
            if _synchronized:
                self._original_body = self._body.attributes.copy()
            elif self.id:
                # Never record ID as dirty.
                self._original_body = {
                    self._alternate_id() or 'id': self.id
                }
                self._original_body = {self._alternate_id() or 'id': self.id}
            else:
                self._original_body = {}
        if self._store_unknown_attrs_as_properties:

@@ -568,8 +583,7 @@ class Resource(dict):

    @classmethod
    def _attributes_iterator(cls, components=tuple([Body, Header])):
        """Iterator over all Resource attributes
        """
        """Iterator over all Resource attributes"""
        # isinstance stricly requires this to be a tuple
        # Since we're looking at class definitions we need to include
        # subclasses, so check the whole MRO.

@@ -581,33 +595,40 @@ class Resource(dict):

    def __repr__(self):
        pairs = [
            "%s=%s" % (k, v if v is not None else 'None')
            for k, v in dict(itertools.chain(
            for k, v in dict(
                itertools.chain(
                    self._body.attributes.items(),
                    self._header.attributes.items(),
                    self._uri.attributes.items(),
                self._computed.attributes.items())).items()
                    self._computed.attributes.items(),
                )
            ).items()
        ]
        args = ", ".join(pairs)

        return "%s.%s(%s)" % (
            self.__module__, self.__class__.__name__, args)
        return "%s.%s(%s)" % (self.__module__, self.__class__.__name__, args)

    def __eq__(self, comparand):
        """Return True if another resource has the same contents"""
        if not isinstance(comparand, Resource):
            return False
        return all([
        return all(
            [
                self._body.attributes == comparand._body.attributes,
                self._header.attributes == comparand._header.attributes,
                self._uri.attributes == comparand._uri.attributes,
            self._computed.attributes == comparand._computed.attributes
        ])
                self._computed.attributes == comparand._computed.attributes,
            ]
        )

    def warning_if_attribute_deprecated(self, attr, value):
        if value and self.deprecated:
            if not self.deprecation_reason:
                LOG.warning("The option [%s] has been deprecated. "
                            "Please avoid using it.", attr)
                LOG.warning(
                    "The option [%s] has been deprecated. "
                    "Please avoid using it.",
                    attr,
                )
            else:
                LOG.warning(self.deprecation_reason)

@@ -632,7 +653,8 @@ class Resource(dict):

            if name in self._attr_aliases:
                # Hmm - not found. But hey, the alias exists...
                return object.__getattribute__(
                    self, self._attr_aliases[name])
                    self, self._attr_aliases[name]
                )
            if self._allow_unknown_attrs_in_body:
                # Last chance, maybe it's in body as attribute which isn't
                # in the mapping at all...

@@ -661,9 +683,10 @@ class Resource(dict):

                if component.name == name:
                    warnings.warn(
                        'Access to "%s[%s]" is deprecated. '
                        'Please access using "%s.%s" attribute.' %
                        (self.__class__, name, self.__class__, attr),
                        DeprecationWarning)
                        'Please access using "%s.%s" attribute.'
                        % (self.__class__, name, self.__class__, attr),
                        DeprecationWarning,
                    )
                    return getattr(self, attr)
        raise KeyError(name)

@@ -684,12 +707,14 @@ class Resource(dict):

                " dict interface.".format(
                    module=self.__module__,
                    cls=self.__class__.__name__,
                    name=name))
                    name=name,
                )
            )

    def _attributes(self, remote_names=False, components=None,
                    include_aliases=True):
        """Generate list of supported attributes
        """
    def _attributes(
        self, remote_names=False, components=None, include_aliases=True
    ):
        """Generate list of supported attributes"""
        attributes = []

        if not components:
@@ -1029,7 +1054,8 @@ class Resource(dict):

            components.append(Computed)
        if not components:
            raise ValueError(
                "At least one of `body`, `headers` or `computed` must be True")
                "At least one of `body`, `headers` or `computed` must be True"
            )

        # isinstance stricly requires this to be a tuple
        components = tuple(components)

@@ -1059,7 +1085,8 @@ class Resource(dict):

                for raw in value:
                    if isinstance(raw, Resource):
                        converted.append(
                            raw.to_dict(_to_munch=_to_munch))
                            raw.to_dict(_to_munch=_to_munch)
                        )
                    elif isinstance(raw, dict) and _to_munch:
                        converted.append(munch.Munch(raw))
                    else:

@@ -1078,8 +1105,11 @@ class Resource(dict):

    def _to_munch(self, original_names=True):
        """Convert this resource into a Munch compatible with shade."""
        return self.to_dict(
            body=True, headers=False,
            original_names=original_names, _to_munch=True)
            body=True,
            headers=False,
            original_names=original_names,
            _to_munch=True,
        )

    def _unpack_properties_to_resource_root(self, body):
        if not body:

@@ -1111,16 +1141,16 @@ class Resource(dict):

                original_body = self._original_body
            else:
                new = self._unpack_properties_to_resource_root(
                    self._body.attributes)
                    self._body.attributes
                )
                original_body = self._unpack_properties_to_resource_root(
                    self._original_body)
                    self._original_body
                )

            # NOTE(gtema) sort result, since we might need validate it in tests
            body = sorted(
                list(jsonpatch.make_patch(
                    original_body,
                    new).patch),
                key=operator.itemgetter('path')
                list(jsonpatch.make_patch(original_body, new).patch),
                key=operator.itemgetter('path'),
            )
        else:
            if not self._store_unknown_attrs_as_properties:

@@ -1128,7 +1158,8 @@ class Resource(dict):

                body = self._body.dirty
            else:
                body = self._unpack_properties_to_resource_root(
                    self._body.dirty)
                    self._body.dirty
                )

        if prepend_key and self.resource_key is not None:
            body = {self.resource_key: body}

@@ -1176,7 +1207,8 @@ class Resource(dict):

        if requires_id:
            if self.id is None:
                raise exceptions.InvalidRequest(
                    "Request requires an ID but none was found")
                    "Request requires an ID but none was found"
                )

            uri = utils.urljoin(uri, self.id)

@@ -1217,7 +1249,8 @@ class Resource(dict):

                self._unknown_attrs_in_body.update(body)
            elif self._store_unknown_attrs_as_properties:
                body_attrs = self._pack_attrs_under_properties(
                    body_attrs, body)
                    body_attrs, body
                )

            self._body.attributes.update(body_attrs)
            self._body.clean()

@@ -1256,7 +1289,8 @@ class Resource(dict):

        raise ValueError(
            "The session argument to Resource methods requires either an"
            " instance of an openstack.proxy.Proxy object or at the very least"
            " a raw keystoneauth1.adapter.Adapter.")
            " a raw keystoneauth1.adapter.Adapter."
        )

    @classmethod
    def _get_microversion_for_list(cls, session):

@@ -1278,8 +1312,9 @@ class Resource(dict):

        if session.default_microversion:
            return session.default_microversion

        return utils.maximum_supported_microversion(session,
                                                    cls._max_microversion)
        return utils.maximum_supported_microversion(
            session, cls._max_microversion
        )

    def _get_microversion_for(self, session, action):
        """Get microversion to use for the given action.

@@ -1298,7 +1333,11 @@ class Resource(dict):

        return self._get_microversion_for_list(session)

    def _assert_microversion_for(
        self, session, action, expected, error_message=None,
        self,
        session,
        action,
        expected,
        error_message=None,
    ):
        """Enforce that the microversion for action satisfies the requirement.
@@ -1311,6 +1350,7 @@ class Resource(dict):

        :raises: :exc:`~openstack.exceptions.NotSupported` if the version
            used for the action is lower than the expected one.
        """

        def _raise(message):
            if error_message:
                error_message.rstrip('.')

@@ -1323,16 +1363,19 @@ class Resource(dict):

        if expected is None:
            return actual
        elif actual is None:
            message = ("API version %s is required, but the default "
                       "version will be used.") % expected
            message = (
                "API version %s is required, but the default "
                "version will be used."
            ) % expected
            _raise(message)

        actual_n = discover.normalize_version_number(actual)
        expected_n = discover.normalize_version_number(expected)
        if actual_n < expected_n:
            message = ("API version %(expected)s is required, but %(actual)s "
                       "will be used.") % {'expected': expected,
                                           'actual': actual}
            message = (
                "API version %(expected)s is required, but %(actual)s "
                "will be used."
            ) % {'expected': expected, 'actual': actual}
            _raise(message)

        return actual

@@ -1357,33 +1400,51 @@ class Resource(dict):

        session = self._get_session(session)
        microversion = self._get_microversion_for(session, 'create')
        requires_id = (self.create_requires_id
        requires_id = (
            self.create_requires_id
            if self.create_requires_id is not None
            else self.create_method == 'PUT')
            else self.create_method == 'PUT'
        )

        if self.create_exclude_id_from_body:
            self._body._dirty.discard("id")

        if self.create_method == 'PUT':
            request = self._prepare_request(requires_id=requires_id,
            request = self._prepare_request(
                requires_id=requires_id,
                prepend_key=prepend_key,
                base_path=base_path)
            response = session.put(request.url,
                                   json=request.body, headers=request.headers,
                                   microversion=microversion, params=params)
                base_path=base_path,
            )
            response = session.put(
                request.url,
                json=request.body,
                headers=request.headers,
                microversion=microversion,
                params=params,
            )
        elif self.create_method == 'POST':
            request = self._prepare_request(requires_id=requires_id,
            request = self._prepare_request(
                requires_id=requires_id,
                prepend_key=prepend_key,
                base_path=base_path)
            response = session.post(request.url,
                                    json=request.body, headers=request.headers,
                                    microversion=microversion, params=params)
                base_path=base_path,
            )
            response = session.post(
                request.url,
                json=request.body,
                headers=request.headers,
                microversion=microversion,
                params=params,
            )
        else:
            raise exceptions.ResourceFailure(
                "Invalid create method: %s" % self.create_method)
                "Invalid create method: %s" % self.create_method
            )

        has_body = (self.has_body if self.create_returns_body is None
                    else self.create_returns_body)
        has_body = (
            self.has_body
            if self.create_returns_body is None
            else self.create_returns_body
        )
        self.microversion = microversion
        self._translate_response(response, has_body=has_body)
        # direct comparision to False since we need to rule out None
@@ -1420,22 +1481,28 @@ class Resource(dict):

        if not cls.allow_create:
            raise exceptions.MethodNotSupported(cls, "create")

        if not (data and isinstance(data, list)
                and all([isinstance(x, dict) for x in data])):
        if not (
            data
            and isinstance(data, list)
            and all([isinstance(x, dict) for x in data])
        ):
            raise ValueError('Invalid data passed: %s' % data)

        session = cls._get_session(session)
        microversion = cls._get_microversion_for(cls, session, 'create')
        requires_id = (cls.create_requires_id
        requires_id = (
            cls.create_requires_id
            if cls.create_requires_id is not None
            else cls.create_method == 'PUT')
            else cls.create_method == 'PUT'
        )
        if cls.create_method == 'PUT':
            method = session.put
        elif cls.create_method == 'POST':
            method = session.post
        else:
            raise exceptions.ResourceFailure(
                "Invalid create method: %s" % cls.create_method)
                "Invalid create method: %s" % cls.create_method
            )

        body = []
        resources = []

@@ -1447,15 +1514,21 @@ class Resource(dict):

            # to return newly created resource objects.
            resource = cls.new(connection=session._get_connection(), **attrs)
            resources.append(resource)
            request = resource._prepare_request(requires_id=requires_id,
                                                base_path=base_path)
            request = resource._prepare_request(
                requires_id=requires_id, base_path=base_path
            )
            body.append(request.body)

        if prepend_key:
            body = {cls.resources_key: body}

        response = method(request.url, json=body, headers=request.headers,
                          microversion=microversion, params=params)
        response = method(
            request.url,
            json=body,
            headers=request.headers,
            microversion=microversion,
            params=params,
        )
        exceptions.raise_from_response(response)
        data = response.json()

@@ -1465,14 +1538,22 @@ class Resource(dict):

        if not isinstance(data, list):
            data = [data]

        has_body = (cls.has_body if cls.create_returns_body is None
                    else cls.create_returns_body)
        has_body = (
            cls.has_body
            if cls.create_returns_body is None
            else cls.create_returns_body
        )
        if has_body and cls.create_returns_body is False:
            return (r.fetch(session) for r in resources)
        else:
            return (cls.existing(microversion=microversion,
            return (
                cls.existing(
                    microversion=microversion,
                    connection=session._get_connection(),
                    **res_dict) for res_dict in data)
                    **res_dict,
                )
                for res_dict in data
            )

    def fetch(
        self,

@@ -1505,13 +1586,17 @@ class Resource(dict):

        if not self.allow_fetch:
            raise exceptions.MethodNotSupported(self, "fetch")

        request = self._prepare_request(requires_id=requires_id,
                                        base_path=base_path)
        request = self._prepare_request(
            requires_id=requires_id, base_path=base_path
        )
        session = self._get_session(session)
        microversion = self._get_microversion_for(session, 'fetch')
        response = session.get(request.url, microversion=microversion,
        response = session.get(
            request.url,
            microversion=microversion,
            params=params,
            skip_cache=skip_cache)
            skip_cache=skip_cache,
        )
        kwargs = {}
        if error_message:
            kwargs['error_message'] = error_message

@@ -1541,8 +1626,7 @@ class Resource(dict):

        session = self._get_session(session)
        microversion = self._get_microversion_for(session, 'fetch')
        response = session.head(request.url,
                                microversion=microversion)
        response = session.head(request.url, microversion=microversion)

        self.microversion = microversion
        self._translate_response(response, has_body=False)

@@ -1551,8 +1635,9 @@ class Resource(dict):

    @property
    def requires_commit(self):
        """Whether the next commit() call will do anything."""
        return (self._body.dirty or self._header.dirty
                or self.allow_empty_commit)
        return (
            self._body.dirty or self._header.dirty or self.allow_empty_commit
        )

    def commit(
        self,
@@ -1596,14 +1681,19 @@ class Resource(dict):

        if self.commit_jsonpatch:
            kwargs['patch'] = True

        request = self._prepare_request(prepend_key=prepend_key,
                                        base_path=base_path,
                                        **kwargs)
        request = self._prepare_request(
            prepend_key=prepend_key, base_path=base_path, **kwargs
        )
        microversion = self._get_microversion_for(session, 'commit')

        return self._commit(session, request, self.commit_method, microversion,
        return self._commit(
            session,
            request,
            self.commit_method,
            microversion,
            has_body=has_body,
            retry_on_conflict=retry_on_conflict)
            retry_on_conflict=retry_on_conflict,
        )

    def _commit(
        self,

@@ -1629,11 +1719,16 @@ class Resource(dict):

            call = getattr(session, method.lower())
        except AttributeError:
            raise exceptions.ResourceFailure(
                "Invalid commit method: %s" % method)
                "Invalid commit method: %s" % method
            )

        response = call(request.url, json=request.body,
                        headers=request.headers, microversion=microversion,
                        **kwargs)
        response = call(
            request.url,
            json=request.body,
            headers=request.headers,
            microversion=microversion,
            **kwargs,
        )

        self.microversion = microversion
        self._translate_response(response, has_body=has_body)

@@ -1708,15 +1803,21 @@ class Resource(dict):

        if not self.allow_patch:
            raise exceptions.MethodNotSupported(self, "patch")

        request = self._prepare_request(prepend_key=prepend_key,
                                        base_path=base_path, patch=True)
        request = self._prepare_request(
            prepend_key=prepend_key, base_path=base_path, patch=True
        )
        microversion = self._get_microversion_for(session, 'patch')
        if patch:
            request.body += self._convert_patch(patch)

        return self._commit(session, request, 'PATCH', microversion,
        return self._commit(
            session,
            request,
            'PATCH',
            microversion,
            has_body=has_body,
            retry_on_conflict=retry_on_conflict)
            retry_on_conflict=retry_on_conflict,
        )

    def delete(self, session, error_message=None, **kwargs):
        """Delete the remote resource based on this instance.

@@ -1750,8 +1851,8 @@ class Resource(dict):

        microversion = self._get_microversion_for(session, 'delete')

        return session.delete(
            request.url, headers=request.headers,
            microversion=microversion)
            request.url, headers=request.headers, microversion=microversion
        )

    @classmethod
    def list(
@@ -1802,8 +1903,10 @@ class Resource(dict):

        if base_path is None:
            base_path = cls.base_path
        params = cls._query_mapping._validate(
            params, base_path=base_path,
            allow_unknown_params=allow_unknown_params)
            params,
            base_path=base_path,
            allow_unknown_params=allow_unknown_params,
        )
        query_params = cls._query_mapping._transpose(params, cls)
        uri = base_path % params
        uri_params = {}

@@ -1812,10 +1915,7 @@ class Resource(dict):

        for k, v in params.items():
            # We need to gather URI parts to set them on the resource later
            if (
                hasattr(cls, k)
                and isinstance(getattr(cls, k), URI)
            ):
            if hasattr(cls, k) and isinstance(getattr(cls, k), URI):
                uri_params[k] = v

        # Track the total number of resources yielded so we can paginate

@@ -1827,7 +1927,8 @@ class Resource(dict):

                uri,
                headers={"Accept": "application/json"},
                params=query_params.copy(),
                microversion=microversion)
                microversion=microversion,
            )
            exceptions.raise_from_response(response)
            data = response.json()

@@ -1857,14 +1958,16 @@ class Resource(dict):

                value = cls.existing(
                    microversion=microversion,
                    connection=session._get_connection(),
                    **raw_resource)
                    **raw_resource,
                )
                marker = value.id
                yield value
                total_yielded += 1

            if resources and paginated:
                uri, next_params = cls._get_next_link(
                    uri, response, data, marker, limit, total_yielded)
                    uri, response, data, marker, limit, total_yielded
                )
                try:
                    if next_params['marker'] == last_marker:
                        # If next page marker is same as what we were just

@@ -1905,7 +2008,7 @@ class Resource(dict):

        # Glance has a next field in the main body
        next_link = next_link or data.get('next')
        if next_link and next_link.startswith('/v'):
            next_link = next_link[next_link.find('/', 1) + 1:]
            next_link = next_link[next_link.find('/', 1) + 1 :]

        if not next_link and 'next' in response.links:
            # RFC5988 specifies Link headers and requests parses them if they

@@ -1956,7 +2059,7 @@ class Resource(dict):

                        the_result = maybe_result
                    else:
                        msg = "More than one %s exists with the name '%s'."
                        msg = (msg % (cls.__name__, name_or_id))
                        msg = msg % (cls.__name__, name_or_id)
                        raise exceptions.DuplicateResource(msg)

        return the_result

@@ -1998,9 +2101,8 @@ class Resource(dict):

        # Try to short-circuit by looking directly for a matching ID.
        try:
            match = cls.existing(
                id=name_or_id,
                connection=session._get_connection(),
                **params)
                id=name_or_id, connection=session._get_connection(), **params
            )
            return match.fetch(session, **params)
        except (exceptions.NotFoundException, exceptions.BadRequestException):
            # NOTE(gtema): There are few places around openstack that return

@@ -2010,8 +2112,10 @@ class Resource(dict):

        if list_base_path:
            params['base_path'] = list_base_path

        if ('name' in cls._query_mapping._mapping.keys()
                and 'name' not in params):
        if (
            'name' in cls._query_mapping._mapping.keys()
            and 'name' not in params
        ):
            params['name'] = name_or_id

        data = cls.list(session, **params)

@@ -2023,7 +2127,8 @@ class Resource(dict):

        if ignore_missing:
            return None
        raise exceptions.ResourceNotFound(
            "No %s found for %s" % (cls.__name__, name_or_id))
            "No %s found for %s" % (cls.__name__, name_or_id)
        )


def _normalize_status(status):
@@ -2076,18 +2181,20 @@ def wait_for_status(

    failures = [f.lower() for f in failures]
    name = "{res}:{id}".format(res=resource.__class__.__name__, id=resource.id)
    msg = "Timeout waiting for {name} to transition to {status}".format(
        name=name, status=status)
        name=name, status=status
    )

    for count in utils.iterate_timeout(
        timeout=wait,
        message=msg,
        wait=interval):
        timeout=wait, message=msg, wait=interval
    ):
        resource = resource.fetch(session, skip_cache=True)

        if not resource:
            raise exceptions.ResourceFailure(
                "{name} went away while waiting for {status}".format(
                    name=name, status=status))
                    name=name, status=status
                )
            )

        new_status = getattr(resource, attribute)
        normalized_status = _normalize_status(new_status)

@@ -2096,10 +2203,17 @@ def wait_for_status(

        elif normalized_status in failures:
            raise exceptions.ResourceFailure(
                "{name} transitioned to failure state {status}".format(
                    name=name, status=new_status))
                    name=name, status=new_status
                )
            )

        LOG.debug('Still waiting for resource %s to reach state %s, '
                  'current state is %s', name, status, new_status)
        LOG.debug(
            'Still waiting for resource %s to reach state %s, '
            'current state is %s',
            name,
            status,
            new_status,
        )


def wait_for_delete(session, resource, interval, wait):

@@ -2120,9 +2234,10 @@ def wait_for_delete(session, resource, interval, wait):

    for count in utils.iterate_timeout(
        timeout=wait,
        message="Timeout waiting for {res}:{id} to delete".format(
            res=resource.__class__.__name__,
            id=resource.id),
        wait=interval):
            res=resource.__class__.__name__, id=resource.id
        ),
        wait=interval,
    ):
        try:
            resource = resource.fetch(session, skip_cache=True)
            if not resource:
tox.ini (3 changed lines)

@@ -110,12 +110,13 @@ application-import-names = openstack

# The following are ignored on purpose. It's not super worth it to fix them.
# However, if you feel strongly about it, patches will be accepted to fix them
# if they fix ALL of the occurances of one and only one of them.
# E203 Black will put spaces after colons in list comprehensions
# H238 New Style Classes are the default in Python3
# H4 Are about docstrings and there's just a huge pile of pre-existing issues.
# W503 Is supposed to be off by default but in the latest pycodestyle isn't.
# Also, both openstacksdk and Donald Knuth disagree with the rule. Line
# breaks should occur before the binary operator for readability.
ignore = H238,H4,W503
ignore = E203, H238, H4, W503
import-order-style = pep8
show-source = True
exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,openstack/_services_mixin.py
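For context on the two flake8 rules that interact with black here, a hypothetical snippet (not taken from the repo) triggers both. E203 fires on the space black inserts before the colon of a slice with a computed bound, exactly the `url_path[: -len('.json')]` change in proxy.py above; W503 fires on black's line breaks before binary operators:

    suffix = '.json'
    url_path = '/v2.0/networks.json'

    # E203: pycodestyle flags the space before ':' that black produces
    # in slices with a computed bound.
    trimmed = url_path[: -len(suffix)]

    # W503: black (and this project) break long boolean expressions
    # *before* the operator, per Knuth's style.
    is_versioned = (
        url_path.startswith('/v')
        and url_path[2].isdigit()
    )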