style(pep8): remove indentation ignores

This patch set removes a few pep8/flake8 ignored rules and implements
the fixes in the code to address those rules.

Change-Id: I2e613acd760818a6e18288d284f6224c38c4353a
Signed-off-by: Tin Lam <tin@irrational.io>
This commit is contained in:
Tin Lam 2018-05-18 15:40:16 -05:00
parent a552bf2a0f
commit 33e2203f5e
15 changed files with 53 additions and 43 deletions

View File

@ -191,7 +191,7 @@ class Manager(object):
try: try:
return ( return (
list(yaml.safe_load_all(body)) list(yaml.safe_load_all(body))
if many else yaml.safe_load(body) if many else yaml.safe_load(body)
) )
except yaml.YAMLError: except yaml.YAMLError:
return None return None

View File

@ -56,10 +56,9 @@ def register_opts(conf):
def list_opts(): def list_opts():
opts = {None: default_opts, opts = {None: default_opts,
barbican_group: barbican_opts + barbican_group: barbican_opts +
ks_loading.get_session_conf_options() + ks_loading.get_session_conf_options() +
ks_loading.get_auth_common_conf_options() + ks_loading.get_auth_common_conf_options() +
ks_loading.get_auth_plugin_conf_options( ks_loading.get_auth_plugin_conf_options('v3password')}
'v3password')}
return opts return opts

View File

@ -145,7 +145,7 @@ class YAMLTranslator(HookableMiddlewareMixin, object):
if requires_content_type: if requires_content_type:
content_type = (req.content_type.split(';', 1)[0].strip() content_type = (req.content_type.split(';', 1)[0].strip()
if req.content_type else '') if req.content_type else '')
if not content_type: if not content_type:
raise falcon.HTTPMissingHeader('Content-Type') raise falcon.HTTPMissingHeader('Content-Type')

View File

@ -127,7 +127,7 @@ def require_unique_document_schema(schema=None):
conflicting_names = [ conflicting_names = [
x.meta for x in documents x.meta for x in documents
if x.meta not in existing_document_names and if x.meta not in existing_document_names and
x.schema.startswith(schema) x.schema.startswith(schema)
] ]
if existing_document_names and conflicting_names: if existing_document_names and conflicting_names:
raise errors.SingletonDocumentConflict( raise errors.SingletonDocumentConflict(
@ -976,7 +976,7 @@ def revision_rollback(revision_id, latest_revision, session=None):
doc_diff = {} doc_diff = {}
for orig_doc in orig_revision['documents']: for orig_doc in orig_revision['documents']:
if ((orig_doc['data_hash'], orig_doc['metadata_hash']) if ((orig_doc['data_hash'], orig_doc['metadata_hash'])
not in latest_revision_hashes): not in latest_revision_hashes):
doc_diff[orig_doc['id']] = True doc_diff[orig_doc['id']] = True
else: else:
doc_diff[orig_doc['id']] = False doc_diff[orig_doc['id']] = False
@ -1036,7 +1036,7 @@ def _get_validation_policies_for_revision(revision_id, session=None):
# Otherwise return early. # Otherwise return early.
LOG.debug('Failed to find a ValidationPolicy for revision ID %s. ' LOG.debug('Failed to find a ValidationPolicy for revision ID %s. '
'Only the "%s" results will be included in the response.', 'Only the "%s" results will be included in the response.',
revision_id, types.DECKHAND_SCHEMA_VALIDATION) revision_id, types.DECKHAND_SCHEMA_VALIDATION)
validation_policies = [] validation_policies = []
return validation_policies return validation_policies

View File

@ -155,8 +155,9 @@ def __build_tables(blob_type_obj, blob_type_list):
ondelete='CASCADE'), ondelete='CASCADE'),
nullable=False) nullable=False)
revision_id = Column( revision_id = Column(
Integer, ForeignKey('revisions.id', ondelete='CASCADE'), Integer,
nullable=False) ForeignKey('revisions.id', ondelete='CASCADE'),
nullable=False)
# Used for documents that haven't changed across revisions but still # Used for documents that haven't changed across revisions but still
# have been carried over into newer revisions. This is necessary in # have been carried over into newer revisions. This is necessary in
# order to roll back to previous revisions or to generate a revision # order to roll back to previous revisions or to generate a revision
@ -167,8 +168,9 @@ def __build_tables(blob_type_obj, blob_type_list):
# still being able to roll back to all the documents that exist in a # still being able to roll back to all the documents that exist in a
# specific revision or generate an accurate revision diff report. # specific revision or generate an accurate revision diff report.
orig_revision_id = Column( orig_revision_id = Column(
Integer, ForeignKey('revisions.id', ondelete='CASCADE'), Integer,
nullable=True) ForeignKey('revisions.id', ondelete='CASCADE'),
nullable=True)
@hybrid_property @hybrid_property
def bucket_name(self): def bucket_name(self):
@ -201,8 +203,9 @@ def __build_tables(blob_type_obj, blob_type_list):
validator = Column(blob_type_obj, nullable=False) validator = Column(blob_type_obj, nullable=False)
errors = Column(blob_type_list, nullable=False, default=[]) errors = Column(blob_type_list, nullable=False, default=[])
revision_id = Column( revision_id = Column(
Integer, ForeignKey('revisions.id', ondelete='CASCADE'), Integer,
nullable=False) ForeignKey('revisions.id', ondelete='CASCADE'),
nullable=False)
this_module = sys.modules[__name__] this_module = sys.modules[__name__]
tables = [Bucket, Document, Revision, RevisionTag, Validation] tables = [Bucket, Document, Revision, RevisionTag, Validation]

View File

@ -186,8 +186,8 @@ class DataSchemaValidator(GenericValidator):
continue continue
if 'data' not in data_schema: if 'data' not in data_schema:
continue continue
schema_prefix, schema_version = _get_schema_parts(data_schema, schema_prefix, schema_version = _get_schema_parts(
'metadata.name') data_schema, 'metadata.name')
schema_map[schema_version].setdefault(schema_prefix, schema_map[schema_version].setdefault(schema_prefix,
data_schema.data) data_schema.data)
@ -308,7 +308,7 @@ class DataSchemaValidator(GenericValidator):
if not schemas_to_use: if not schemas_to_use:
LOG.debug('Document schema %s not recognized by %s. No further ' LOG.debug('Document schema %s not recognized by %s. No further '
'validation required.', document.schema, 'validation required.', document.schema,
self.__class__.__name__) self.__class__.__name__)
for schema in schemas_to_use: for schema in schemas_to_use:
is_builtin_schema = schema not in self._external_data_schemas is_builtin_schema = schema not in self._external_data_schemas

View File

@ -146,7 +146,7 @@ class DocumentLayering(object):
current_parent = self._documents_by_index.get(parent_meta, None) current_parent = self._documents_by_index.get(parent_meta, None)
if current_parent: if current_parent:
if (self._layer_order.index(parent.layer) > if (self._layer_order.index(parent.layer) >
self._layer_order.index(current_parent.layer)): self._layer_order.index(current_parent.layer)):
self._parents[child.meta] = parent.meta self._parents[child.meta] = parent.meta
all_children[child] -= 1 all_children[child] -= 1
else: else:
@ -304,7 +304,7 @@ class DocumentLayering(object):
# document itself then return the parent. # document itself then return the parent.
use_replacement = ( use_replacement = (
parent and parent.has_replacement and parent and parent.has_replacement and
parent.replaced_by is not doc parent.replaced_by is not doc
) )
if use_replacement: if use_replacement:
parent = parent.replaced_by parent = parent.replaced_by
@ -481,7 +481,7 @@ class DocumentLayering(object):
else: else:
substitution_sources = [ substitution_sources = [
d for d in self._documents_by_index.values() d for d in self._documents_by_index.values()
if not d.is_abstract if not d.is_abstract
] ]
substitution_sources = self._calc_replacements_and_substitutions( substitution_sources = self._calc_replacements_and_substitutions(
@ -709,7 +709,7 @@ class DocumentLayering(object):
# Return only concrete documents and non-replacements. # Return only concrete documents and non-replacements.
return [d for d in self._sorted_documents return [d for d in self._sorted_documents
if d.is_abstract is False and d.has_replacement is False] if d.is_abstract is False and d.has_replacement is False]
@property @property
def documents(self): def documents(self):

View File

@ -61,9 +61,9 @@ class SecretsManager(object):
secret_uuid = None secret_uuid = None
return ( return (
isinstance(secret_ref, six.string_types) and isinstance(secret_ref, six.string_types) and
cls._url_re.match(secret_ref) and cls._url_re.match(secret_ref) and
'secrets' in secret_ref and 'secrets' in secret_ref and
uuidutils.is_uuid_like(secret_uuid) uuidutils.is_uuid_like(secret_uuid)
) )
@classmethod @classmethod

View File

@ -214,7 +214,7 @@ class TestDocuments(base.TestDbBase):
# Verify that all the expected documents were deleted. # Verify that all the expected documents were deleted.
self.assertEqual( self.assertEqual(
sorted([(d['metadata']['name'], d['schema']) sorted([(d['metadata']['name'], d['schema'])
for d in created_documents]), for d in created_documents]),
sorted([(d['name'], d['schema']) for d in deleted_documents])) sorted([(d['name'], d['schema']) for d in deleted_documents]))
# Verify that all their attributes have been cleared and that the # Verify that all their attributes have been cleared and that the

View File

@ -916,8 +916,8 @@ class TestDocumentLayering3LayersAbstractConcrete(TestDocumentLayering):
"actions": [{"method": "replace", "path": ".b"}]} "actions": [{"method": "replace", "path": ".b"}]}
} }
doc_factory = factories.DocumentFactory(3, [1, 1, 1]) doc_factory = factories.DocumentFactory(3, [1, 1, 1])
documents = doc_factory.gen_test(mapping, site_abstract=False, documents = doc_factory.gen_test(
region_abstract=False) mapping, site_abstract=False, region_abstract=False)
site_expected = {"a": {"x": 1, "y": 2, "z": 3}, "b": 4} site_expected = {"a": {"x": 1, "y": 2, "z": 3}, "b": 4}
region_expected = {"a": {"x": 1, "y": 2, "z": 3}, "b": 5} region_expected = {"a": {"x": 1, "y": 2, "z": 3}, "b": 5}

View File

@ -34,7 +34,7 @@ class TestDocumentLayeringReplacementNegative(
documents[2]['metadata']['name'] = 'bar' documents[2]['metadata']['name'] = 'bar'
error_re = (r'.*Document replacement requires that both documents ' error_re = (r'.*Document replacement requires that both documents '
'have the same `schema` and `metadata.name`.') 'have the same `schema` and `metadata.name`.')
self.assertRaisesRegexp(errors.InvalidDocumentReplacement, error_re, self.assertRaisesRegexp(errors.InvalidDocumentReplacement, error_re,
self._test_layering, documents) self._test_layering, documents)
@ -44,7 +44,7 @@ class TestDocumentLayeringReplacementNegative(
documents[2]['metadata']['schema'] = 'example/Other/v1' documents[2]['metadata']['schema'] = 'example/Other/v1'
error_re = (r'Document replacement requires that both documents ' error_re = (r'Document replacement requires that both documents '
'have the same `schema` and `metadata.name`.') 'have the same `schema` and `metadata.name`.')
self.assertRaisesRegexp(errors.InvalidDocumentReplacement, error_re, self.assertRaisesRegexp(errors.InvalidDocumentReplacement, error_re,
self._test_layering, documents) self._test_layering, documents)
@ -76,7 +76,7 @@ class TestDocumentLayeringReplacementNegative(
documents[2]['metadata']['layeringDefinition'].pop('parentSelector') documents[2]['metadata']['layeringDefinition'].pop('parentSelector')
error_re = (r'Document replacement requires that the document with ' error_re = (r'Document replacement requires that the document with '
'`replacement: true` have a parent.') '`replacement: true` have a parent.')
self.assertRaisesRegexp(errors.InvalidDocumentReplacement, error_re, self.assertRaisesRegexp(errors.InvalidDocumentReplacement, error_re,
self._test_layering, documents) self._test_layering, documents)
@ -96,6 +96,6 @@ class TestDocumentLayeringReplacementNegative(
documents[3]['metadata']['replacement'] = True documents[3]['metadata']['replacement'] = True
error_re = (r'A replacement document cannot itself be replaced by ' error_re = (r'A replacement document cannot itself be replaced by '
'another document.') 'another document.')
self.assertRaisesRegexp(errors.InvalidDocumentReplacement, error_re, self.assertRaisesRegexp(errors.InvalidDocumentReplacement, error_re,
self._test_layering, documents) self._test_layering, documents)

View File

@ -114,9 +114,10 @@ class TestDocumentLayeringNegative(
'parentSelector'] = parent_selector 'parentSelector'] = parent_selector
layering.DocumentLayering(documents, validate=False) layering.DocumentLayering(documents, validate=False)
self.assertTrue(any('Could not find parent for document' in self.assertTrue(
mock_log.debug.mock_calls[x][1][0]) any('Could not find parent for document' in
for x in range(len(mock_log.debug.mock_calls))) mock_log.debug.mock_calls[x][1][0])
for x in range(len(mock_log.debug.mock_calls)))
mock_log.debug.reset_mock() mock_log.debug.reset_mock()
@mock.patch.object(layering, 'LOG', autospec=True) @mock.patch.object(layering, 'LOG', autospec=True)
@ -129,9 +130,10 @@ class TestDocumentLayeringNegative(
documents[1]['metadata']['labels'] = parent_label documents[1]['metadata']['labels'] = parent_label
layering.DocumentLayering(documents, validate=False) layering.DocumentLayering(documents, validate=False)
self.assertTrue(any('Could not find parent for document' in self.assertTrue(
mock_log.debug.mock_calls[x][1][0]) any('Could not find parent for document' in
for x in range(len(mock_log.debug.mock_calls))) mock_log.debug.mock_calls[x][1][0])
for x in range(len(mock_log.debug.mock_calls)))
mock_log.debug.reset_mock() mock_log.debug.reset_mock()
def test_layering_duplicate_parent_selector_2_layer(self): def test_layering_duplicate_parent_selector_2_layer(self):
@ -168,9 +170,10 @@ class TestDocumentLayeringNegative(
'parentSelector'] = self_ref 'parentSelector'] = self_ref
layering.DocumentLayering(documents, validate=False) layering.DocumentLayering(documents, validate=False)
self.assertTrue(any('Could not find parent for document' in self.assertTrue(
mock_log.debug.mock_calls[x][1][0]) any('Could not find parent for document' in
for x in range(len(mock_log.debug.mock_calls))) mock_log.debug.mock_calls[x][1][0])
for x in range(len(mock_log.debug.mock_calls)))
def test_layering_without_layering_policy_raises_exc(self): def test_layering_without_layering_policy_raises_exc(self):
doc_factory = factories.DocumentFactory(1, [1]) doc_factory = factories.DocumentFactory(1, [1])

View File

@ -108,7 +108,8 @@ class RealPolicyFixture(fixtures.Fixture):
""" """
if not (set(self.expected_policy_actions) == if not (set(self.expected_policy_actions) ==
set(self.actual_policy_actions)): set(self.actual_policy_actions)):
error_msg = ('The expected policy actions passed to ' error_msg = (
'The expected policy actions passed to '
'`self.policy.set_rules` do not match the policy actions ' '`self.policy.set_rules` do not match the policy actions '
'that were actually enforced by Deckhand. Set of expected ' 'that were actually enforced by Deckhand. Set of expected '
'policies %s should be equal to set of actual policies: %s. ' 'policies %s should be equal to set of actual policies: %s. '

View File

@ -0,0 +1,4 @@
---
fixes:
- |
Removed indentation rules E127, E128, E129 and E131 from pep8 exclusion.

View File

@ -96,7 +96,7 @@ commands = flake8 {posargs}
# [H210] Require autospec, spec, or spec_set in mock.patch/mock.patch.object calls # [H210] Require autospec, spec, or spec_set in mock.patch/mock.patch.object calls
# [H904] Delay string interpolations at logging calls. # [H904] Delay string interpolations at logging calls.
enable-extensions = H106,H203,H204,H205,H210,H904 enable-extensions = H106,H203,H204,H205,H210,H904
ignore = E127,E128,E129,E131,H405 ignore = H405
exclude = .venv,.git,.tox,dist,*lib/python*,*egg,build,releasenotes,docs,alembic/versions exclude = .venv,.git,.tox,dist,*lib/python*,*egg,build,releasenotes,docs,alembic/versions
[testenv:docs] [testenv:docs]