Standardize Pegleg code with YAPF

This patch addresses inconsistent code style and adds a gate that
enforces the agreed style on future submissions.

Separate work will be done in the future to address several of the
PEP8 ignores for docstrings and to bring the tests directory into
PEP8 compliance.

This patch:
1. Updates .style.yapf to set the knobs desired for YAPF.
2. Updates tox.ini to allow one of the knobs to work.
3. Removes unused code from several __init__.py files.
4. Updates the YAPF version in test-requirements.txt to latest (this
   is needed for several knobs to work).
5. Applies stylistic changes to the Python codebase in Pegleg.
6. Updates to tox.ini to run YAPF during PEP8 check.

Change-Id: Ieaa0fdef2b601d01c875d64b840986e54df73abf
Alexander Hughes 2019-06-07 21:03:09 -04:00
parent 5c0a3bef03
commit 1c8d92ef6b
45 changed files with 1218 additions and 1071 deletions
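
Because the Makefile targets below delegate to tox, the style gate and the
auto-formatter run the same way locally as they do in CI. A minimal usage
sketch, assuming the pep8 and fmt tox environments referenced by the Makefile:

    # Run the lint gate; per item 6 above, this now also performs the YAPF check
    tox -e pep8

    # Rewrite files in place with YAPF before pushing a new patch set
    tox -e fmt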

View File

@@ -2,4 +2,9 @@
 based_on_style = pep8
 spaces_before_comment = 2
 column_limit = 79
-split_before_logical_operator = false
+blank_line_before_nested_class_or_def = false
+blank_line_before_module_docstring = true
+split_before_logical_operator = true
+split_before_first_argument = true
+allow_split_before_dict_value = false
+split_before_arithmetic_operator = true
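
The knobs above extend the pep8 base style and live in .style.yapf at the
repository root, where YAPF picks them up automatically. To preview the effect
of a knob change before committing, YAPF can also be run by hand; the paths in
this sketch are illustrative only:

    # Show the reformatting YAPF would apply, without modifying the file
    yapf --diff pegleg/cli.py

    # Apply the formatting in place across a directory tree
    yapf --in-place --recursive pegleg/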

View File

@@ -63,7 +63,8 @@ lint: py_lint

 # Perform auto formatting
 .PHONY: format
-format: py_format
+format:
+	tox -e fmt

 _BASE_IMAGE_ARG := $(if $(BASE_IMAGE),--build-arg FROM="${BASE_IMAGE}" ,)

@@ -107,8 +108,4 @@ clean:

 .PHONY: py_lint
 py_lint:
 	tox -e pep8
-
-.PHONY: py_format
-py_format:
-	tox -e fmt

View File

@@ -126,11 +126,12 @@ SITE_REPOSITORY_ARGUMENT = click.argument(


 @click.group(context_settings=CONTEXT_SETTINGS)
-@click.option('-v',
-              '--verbose',
-              is_flag=True,
-              default=False,
-              help='Enable debug logging')
+@click.option(
+    '-v',
+    '--verbose',
+    is_flag=True,
+    default=False,
+    help='Enable debug logging')
 def main(*, verbose):
     """Main CLI meta-group, which includes the following groups:
@@ -167,19 +168,17 @@ def repo(*, site_repository, clone_path, repo_key, repo_username):
     config.set_umask()


-def _lint_helper(*,
-                 fail_on_missing_sub_src,
-                 exclude_lint,
-                 warn_lint,
-                 site_name=None):
+def _lint_helper(
+        *, fail_on_missing_sub_src, exclude_lint, warn_lint, site_name=None):
     """Helper for executing lint on specific site or all sites in repo."""
     if site_name:
         func = functools.partial(engine.lint.site, site_name=site_name)
     else:
         func = engine.lint.full
-    warns = func(fail_on_missing_sub_src=fail_on_missing_sub_src,
-                 exclude_lint=exclude_lint,
-                 warn_lint=warn_lint)
+    warns = func(
+        fail_on_missing_sub_src=fail_on_missing_sub_src,
+        exclude_lint=exclude_lint,
+        warn_lint=warn_lint)
     if warns:
         click.echo("Linting passed, but produced some warnings.")
         for w in warns:
@@ -194,9 +193,10 @@ def lint_repo(*, fail_on_missing_sub_src, exclude_lint, warn_lint):
     """Lint all sites using checks defined in :mod:`pegleg.engine.errorcodes`.
     """
     engine.repository.process_site_repository(update_config=True)
-    _lint_helper(fail_on_missing_sub_src=fail_on_missing_sub_src,
-                 exclude_lint=exclude_lint,
-                 warn_lint=warn_lint)
+    _lint_helper(
+        fail_on_missing_sub_src=fail_on_missing_sub_src,
+        exclude_lint=exclude_lint,
+        warn_lint=warn_lint)


 @main.group(help='Commands related to sites')
@@ -205,8 +205,9 @@ def lint_repo(*, fail_on_missing_sub_src, exclude_lint, warn_lint):
 @EXTRA_REPOSITORY_OPTION
 @REPOSITORY_USERNAME_OPTION
 @REPOSITORY_KEY_OPTION
-def site(*, site_repository, clone_path, extra_repositories, repo_key,
-         repo_username):
+def site(
+        *, site_repository, clone_path, extra_repositories, repo_key,
+        repo_username):
     """Group for site-level actions, which include:

     * list: list available sites in a manifests repo
@@ -225,11 +226,12 @@ def site(*, site_repository, clone_path, extra_repositories, repo_key,


 @site.command(help='Output complete config for one site')
-@click.option('-s',
-              '--save-location',
-              'save_location',
-              help='Directory to output the complete site definition. Created '
-              'automatically if it does not already exist.')
+@click.option(
+    '-s',
+    '--save-location',
+    'save_location',
+    help='Directory to output the complete site definition. Created '
+    'automatically if it does not already exist.')
 @click.option(
     '--validate/--no-validate',
     'validate',
@@ -246,11 +248,12 @@ def site(*, site_repository, clone_path, extra_repositories, repo_key,
     multiple=True,
     help='Excludes specified linting checks. Warnings will still be issued. '
     '-w takes priority over -x.')
-@click.option('-w',
-              '--warn',
-              'warn_lint',
-              multiple=True,
-              help='Warn if linting check fails. -w takes priority over -x.')
+@click.option(
+    '-w',
+    '--warn',
+    'warn_lint',
+    multiple=True,
+    help='Warn if linting check fails. -w takes priority over -x.')
 @SITE_REPOSITORY_ARGUMENT
 def collect(*, save_location, validate, exclude_lint, warn_lint, site_name):
     """Collects documents into a single site-definition.yaml file, which
@@ -265,10 +268,11 @@ def collect(*, save_location, validate, exclude_lint, warn_lint, site_name):
     """
     if validate:
         # Lint the primary repo prior to document collection.
-        _lint_helper(site_name=site_name,
-                     fail_on_missing_sub_src=True,
-                     exclude_lint=exclude_lint,
-                     warn_lint=warn_lint)
+        _lint_helper(
+            site_name=site_name,
+            fail_on_missing_sub_src=True,
+            exclude_lint=exclude_lint,
+            warn_lint=warn_lint)

     engine.site.collect(site_name, save_location)

@@ -312,10 +316,11 @@ def lint_site(*, fail_on_missing_sub_src, exclude_lint, warn_lint, site_name):
     """Lint a given site using checks defined in
     :mod:`pegleg.engine.errorcodes`.
     """
-    _lint_helper(site_name=site_name,
-                 fail_on_missing_sub_src=fail_on_missing_sub_src,
-                 exclude_lint=exclude_lint,
-                 warn_lint=warn_lint)
+    _lint_helper(
+        site_name=site_name,
+        fail_on_missing_sub_src=fail_on_missing_sub_src,
+        exclude_lint=exclude_lint,
+        warn_lint=warn_lint)


 def collection_default_callback(ctx, param, value):
@@ -327,14 +332,16 @@ def collection_default_callback(ctx, param, value):

 @site.command('upload', help='Upload documents to Shipyard')
 # Keystone authentication parameters
-@click.option('--os-project-domain-name',
-              envvar='OS_PROJECT_DOMAIN_NAME',
-              required=False,
-              default='default')
-@click.option('--os-user-domain-name',
-              envvar='OS_USER_DOMAIN_NAME',
-              required=False,
-              default='default')
+@click.option(
+    '--os-project-domain-name',
+    envvar='OS_PROJECT_DOMAIN_NAME',
+    required=False,
+    default='default')
+@click.option(
+    '--os-user-domain-name',
+    envvar='OS_USER_DOMAIN_NAME',
+    required=False,
+    default='default')
 @click.option('--os-project-name', envvar='OS_PROJECT_NAME', required=False)
 @click.option('--os-username', envvar='OS_USERNAME', required=False)
 @click.option('--os-password', envvar='OS_PASSWORD', required=False)
@@ -362,16 +369,18 @@ def collection_default_callback(ctx, param, value):
     'collection does not already exist in the Shipyard buffer.\n'
     'replace: Clear the Shipyard Buffer before adding the specified '
     'collection.\n')
-@click.option('--collection',
-              'collection',
-              help='Specifies the name to use for the uploaded collection. '
-              'Defaults to the specified `site_name`.',
-              callback=collection_default_callback)
+@click.option(
+    '--collection',
+    'collection',
+    help='Specifies the name to use for the uploaded collection. '
+    'Defaults to the specified `site_name`.',
+    callback=collection_default_callback)
 @SITE_REPOSITORY_ARGUMENT
 @click.pass_context
-def upload(ctx, *, os_project_domain_name, os_user_domain_name,
-           os_project_name, os_username, os_password, os_auth_url,
-           os_auth_token, context_marker, site_name, buffer_mode, collection):
+def upload(
+        ctx, *, os_project_domain_name, os_user_domain_name, os_project_name,
+        os_username, os_password, os_auth_url, os_auth_token, context_marker,
+        site_name, buffer_mode, collection):
     if not ctx.obj:
         ctx.obj = {}
@@ -415,12 +424,13 @@ def secrets():
     'for tracking provenance information in the PeglegManagedDocuments. '
     'An attempt is made to automatically determine this value, '
     'but should be provided.')
-@click.option('-d',
-              '--days',
-              'days',
-              default=365,
-              show_default=True,
-              help='Duration in days generated certificates should be valid.')
+@click.option(
+    '-d',
+    '--days',
+    'days',
+    default=365,
+    show_default=True,
+    help='Duration in days generated certificates should be valid.')
 @click.argument('site_name')
 def generate_pki(site_name, author, days):
     """Generate certificates, certificate authorities and keypairs for a given
@@ -429,9 +439,8 @@ def generate_pki(site_name, author, days):
     """
     engine.repository.process_repositories(site_name, overwrite_existing=True)

-    pkigenerator = catalog.pki_generator.PKIGenerator(site_name,
-                                                      author=author,
-                                                      duration=days)
+    pkigenerator = catalog.pki_generator.PKIGenerator(
+        site_name, author=author, duration=days)

     output_paths = pkigenerator.generate()
     click.echo("Generated PKI files written to:\n%s" % '\n'.join(output_paths))
@@ -441,13 +450,12 @@ def generate_pki(site_name, author, days):
     'wrap',
     help='Wrap bare files (e.g. pem or crt) in a PeglegManagedDocument '
     'and encrypt them (by default).')
-@click.option('-a',
-              '--author',
-              'author',
-              help='Author for the new wrapped file.')
-@click.option('--filename',
-              'filename',
-              help='The relative file path for the file to be wrapped.')
+@click.option(
+    '-a', '--author', 'author', help='Author for the new wrapped file.')
+@click.option(
+    '--filename',
+    'filename',
+    help='The relative file path for the file to be wrapped.')
 @click.option(
     '-o',
     '--output-path',
@@ -455,53 +463,58 @@ def generate_pki(site_name, author, days):
     required=False,
     help='The output path for the wrapped file. (default: input path with '
     '.yaml)')
-@click.option('-s',
-              '--schema',
-              'schema',
-              help='The schema for the document to be wrapped, e.g. '
-              'deckhand/Certificate/v1')
-@click.option('-n',
-              '--name',
-              'name',
-              help='The name for the document to be wrapped, e.g. new-cert')
-@click.option('-l',
-              '--layer',
-              'layer',
-              help='The layer for the document to be wrapped., e.g. site.')
-@click.option('--encrypt/--no-encrypt',
-              'encrypt',
-              is_flag=True,
-              default=True,
-              show_default=True,
-              help='Whether to encrypt the wrapped file.')
+@click.option(
+    '-s',
+    '--schema',
+    'schema',
+    help='The schema for the document to be wrapped, e.g. '
+    'deckhand/Certificate/v1')
+@click.option(
+    '-n',
+    '--name',
+    'name',
+    help='The name for the document to be wrapped, e.g. new-cert')
+@click.option(
+    '-l',
+    '--layer',
+    'layer',
+    help='The layer for the document to be wrapped., e.g. site.')
+@click.option(
+    '--encrypt/--no-encrypt',
+    'encrypt',
+    is_flag=True,
+    default=True,
+    show_default=True,
+    help='Whether to encrypt the wrapped file.')
 @click.argument('site_name')
-def wrap_secret_cli(*, site_name, author, filename, output_path, schema, name,
-                    layer, encrypt):
+def wrap_secret_cli(
+        *, site_name, author, filename, output_path, schema, name, layer,
+        encrypt):
     """Wrap a bare secrets file in a YAML and ManagedDocument.

     """
     engine.repository.process_repositories(site_name, overwrite_existing=True)
-    wrap_secret(author,
-                filename,
-                output_path,
-                schema,
-                name,
-                layer,
-                encrypt,
-                site_name=site_name)
+    wrap_secret(
+        author,
+        filename,
+        output_path,
+        schema,
+        name,
+        layer,
+        encrypt,
+        site_name=site_name)


-@site.command('genesis_bundle',
-              help='Construct the genesis deployment bundle.')
-@click.option('-b',
-              '--build-dir',
-              'build_dir',
-              type=click.Path(file_okay=False,
-                              dir_okay=True,
-                              resolve_path=True),
-              required=True,
-              help='Destination directory to store the genesis bundle.')
+@site.command(
+    'genesis_bundle', help='Construct the genesis deployment bundle.')
+@click.option(
+    '-b',
+    '--build-dir',
+    'build_dir',
+    type=click.Path(file_okay=False, dir_okay=True, resolve_path=True),
+    required=True,
+    help='Destination directory to store the genesis bundle.')
 @click.option(
     '--include-validators',
     'validators',
@@ -512,8 +525,9 @@ def wrap_secret_cli(*, site_name, author, filename, output_path, schema, name,
 @SITE_REPOSITORY_ARGUMENT
 def genesis_bundle(*, build_dir, validators, site_name):
     encryption_key = os.environ.get("PROMENADE_ENCRYPTION_KEY")
-    bundle.build_genesis(build_dir, encryption_key, validators,
-                         logging.DEBUG == LOG.getEffectiveLevel(), site_name)
+    bundle.build_genesis(
+        build_dir, encryption_key, validators,
+        logging.DEBUG == LOG.getEffectiveLevel(), site_name)


 @secrets.command(
@@ -534,8 +548,9 @@ def check_pki_certs(site_name, days):
     cert_results = engine.secrets.check_cert_expiry(site_name, duration=days)

-    click.echo("The following certs will expire within {} days: \n{}".format(
-        days, cert_results))
+    click.echo(
+        "The following certs will expire within {} days: \n{}".format(
+            days, cert_results))


 @main.group(help='Commands related to types')
@@ -544,8 +559,9 @@ def check_pki_certs(site_name, days):
 @EXTRA_REPOSITORY_OPTION
 @REPOSITORY_USERNAME_OPTION
 @REPOSITORY_KEY_OPTION
-def type(*, site_repository, clone_path, extra_repositories, repo_key,
-         repo_username):
+def type(
+        *, site_repository, clone_path, extra_repositories, repo_key,
+        repo_username):
     """Group for repo-level actions, which include:

     * list: list all types across the repository
@@ -566,8 +582,8 @@ def list_types(*, output_stream):
     engine.type.list_types(output_stream)


-@secrets.group(name='generate',
-               help='Command group to generate site secrets documents.')
+@secrets.group(
+    name='generate', help='Command group to generate site secrets documents.')
 def generate():
     pass
@@ -591,12 +607,13 @@ def generate():
     required=True,
     help='Identifier for the program or person who is generating the secrets '
     'documents')
-@click.option('-i',
-              '--interactive',
-              'interactive',
-              is_flag=True,
-              default=False,
-              help='Generate passphrases interactively, not automatically')
+@click.option(
+    '-i',
+    '--interactive',
+    'interactive',
+    is_flag=True,
+    default=False,
+    help='Generate passphrases interactively, not automatically')
 @click.option(
     '--force-cleartext',
     'force_cleartext',
@@ -604,17 +621,18 @@ def generate():
     default=False,
     show_default=True,
     help='Force cleartext generation of passphrases. This is not recommended.')
-def generate_passphrases(*, site_name, save_location, author, interactive,
-                         force_cleartext):
+def generate_passphrases(
+        *, site_name, save_location, author, interactive, force_cleartext):
     engine.repository.process_repositories(site_name)
-    engine.secrets.generate_passphrases(site_name, save_location, author,
-                                        interactive, force_cleartext)
+    engine.secrets.generate_passphrases(
+        site_name, save_location, author, interactive, force_cleartext)


-@secrets.command('encrypt',
-                 help='Command to encrypt and wrap site secrets '
-                 'documents with metadata.storagePolicy set '
-                 'to encrypted, in pegleg managed documents.')
+@secrets.command(
+    'encrypt',
+    help='Command to encrypt and wrap site secrets '
+    'documents with metadata.storagePolicy set '
+    'to encrypted, in pegleg managed documents.')
 @click.option(
     '-s',
     '--save-location',
@@ -639,14 +657,16 @@ def encrypt(*, save_location, author, site_name):
     engine.secrets.encrypt(save_location, author, site_name=site_name)


-@secrets.command('decrypt',
-                 help='Command to unwrap and decrypt one site '
-                 'secrets document and print it to stdout.')
-@click.option('--path',
-              'path',
-              type=click.Path(exists=True, readable=True),
-              required=True,
-              help='The file or directory path to decrypt.')
+@secrets.command(
+    'decrypt',
+    help='Command to unwrap and decrypt one site '
+    'secrets document and print it to stdout.')
+@click.option(
+    '--path',
+    'path',
+    type=click.Path(exists=True, readable=True),
+    required=True,
+    help='The file or directory path to decrypt.')
 @click.option(
     '-s',
     '--save-location',
@@ -688,27 +708,31 @@ def generate():
 @generate.command(
     'passphrase',
     help='Command to generate a passphrase and print out to stdout')
-@click.option('-l',
-              '--length',
-              'length',
-              default=24,
-              show_default=True,
-              help='Generate a passphrase of the given length. '
-              'Length is >= 24, no maximum length.')
+@click.option(
+    '-l',
+    '--length',
+    'length',
+    default=24,
+    show_default=True,
+    help='Generate a passphrase of the given length. '
+    'Length is >= 24, no maximum length.')
 def generate_passphrase(length):
-    click.echo('Generated Passhprase: {}'.format(
-        engine.secrets.generate_crypto_string(length)))
+    click.echo(
+        'Generated Passhprase: {}'.format(
+            engine.secrets.generate_crypto_string(length)))


-@generate.command('salt',
-                  help='Command to generate a salt and print out to stdout')
-@click.option('-l',
-              '--length',
-              'length',
-              default=24,
-              show_default=True,
-              help='Generate a passphrase of the given length. '
-              'Length is >= 24, no maximum length.')
+@generate.command(
+    'salt', help='Command to generate a salt and print out to stdout')
+@click.option(
+    '-l',
+    '--length',
+    'length',
+    default=24,
+    show_default=True,
+    help='Generate a passphrase of the given length. '
+    'Length is >= 24, no maximum length.')
 def generate_salt(length):
-    click.echo("Generated Salt: {}".format(
-        engine.secrets.generate_crypto_string(length)))
+    click.echo(
+        "Generated Salt: {}".format(
+            engine.secrets.generate_crypto_string(length)))

View File

@@ -16,16 +16,15 @@ import logging
 import os

 import click

-from promenade.builder import Builder
-from promenade.config import Configuration
-from promenade import exceptions
-
 from pegleg.engine.exceptions import GenesisBundleEncryptionException
 from pegleg.engine.exceptions import GenesisBundleGenerateException
 from pegleg.engine import util
 from pegleg.engine.util.pegleg_secret_management import PeglegSecretManagement
+from promenade.builder import Builder
+from promenade.config import Configuration
+from promenade import exceptions

 LOG = logging.getLogger(__name__)

 __all__ = [
@@ -80,8 +79,8 @@ def build_genesis(build_path, encryption_key, validators, debug, site_name):
         raise GenesisBundleEncryptionException()

     except exceptions.PromenadeException as e:
-        LOG.error('Build genesis bundle failed! {}.'.format(
-            e.display(debug=debug)))
+        LOG.error(
+            'Build genesis bundle failed! {}.'.format(e.display(debug=debug)))
         raise GenesisBundleGenerateException()

     LOG.info('=== Done! ===')

View File

@@ -42,8 +42,8 @@ class PKIGenerator(object):
     """

-    def __init__(self, sitename, block_strings=True, author=None,
-                 duration=365):
+    def __init__(
+            self, sitename, block_strings=True, author=None, duration=365):
         """Constructor for ``PKIGenerator``.

         :param int duration: Duration in days that generated certificates
@@ -61,8 +61,8 @@
         self._documents = util.definition.documents_for_site(sitename)
         self._author = author

-        self.keys = pki_utility.PKIUtility(block_strings=block_strings,
-                                           duration=duration)
+        self.keys = pki_utility.PKIUtility(
+            block_strings=block_strings, duration=duration)
         self.outputs = collections.defaultdict(dict)

         # Maps certificates to CAs in order to derive certificate paths.
@@ -119,10 +119,8 @@
     def gen_cert(self, document_name, *, ca_cert, ca_key, **kwargs):
         ca_cert_data = ca_cert['data']['managedDocument']['data']
         ca_key_data = ca_key['data']['managedDocument']['data']
-        return self.keys.generate_certificate(document_name,
-                                              ca_cert=ca_cert_data,
-                                              ca_key=ca_key_data,
-                                              **kwargs)
+        return self.keys.generate_certificate(
+            document_name, ca_cert=ca_cert_data, ca_key=ca_key_data, **kwargs)

     def gen_keypair(self, document_name):
         return self.keys.generate_keypair(document_name)
@@ -149,30 +147,31 @@
         docs = self._find_among_collected(schemas, document_name)
         if docs:
             if len(docs) == len(kinds):
-                LOG.debug('Found docs in input config named %s, kinds: %s',
-                          document_name, kinds)
+                LOG.debug(
+                    'Found docs in input config named %s, kinds: %s',
+                    document_name, kinds)
                 return docs
             else:
-                raise exceptions.IncompletePKIPairError(kinds=kinds,
-                                                        name=document_name)
+                raise exceptions.IncompletePKIPairError(
+                    kinds=kinds, name=document_name)
         else:
             docs = self._find_among_outputs(schemas, document_name)
             if docs:
-                LOG.debug('Found docs in current outputs named %s, kinds: %s',
-                          document_name, kinds)
+                LOG.debug(
+                    'Found docs in current outputs named %s, kinds: %s',
+                    document_name, kinds)
                 return docs
         # TODO(felipemonteiro): Should this be a critical error?
-        LOG.debug('No docs existing docs named %s, kinds: %s', document_name,
-                  kinds)
+        LOG.debug(
+            'No docs existing docs named %s, kinds: %s', document_name, kinds)
         return []

     def _find_among_collected(self, schemas, document_name):
         result = []
         for schema in schemas:
-            doc = _find_document_by(self._documents,
-                                    schema=schema,
-                                    name=document_name)
+            doc = _find_document_by(
+                self._documents, schema=schema, name=document_name)
             # If the document wasn't found, then means it needs to be
             # generated.
             if doc:
@@ -224,20 +223,21 @@
                 document = PeglegSecretManagement(
                     docs=[document]).get_encrypted_secrets()[0][0]

-            util.files.dump(document,
-                            output_path,
-                            flag='a',
-                            default_flow_style=False,
-                            explicit_start=True,
-                            indent=2)
+            util.files.dump(
+                document,
+                output_path,
+                flag='a',
+                default_flow_style=False,
+                explicit_start=True,
+                indent=2)
             output_paths.add(output_path)

         return output_paths

     def get_documents(self):
         return list(
-            itertools.chain.from_iterable(v.values()
-                                          for v in self.outputs.values()))
+            itertools.chain.from_iterable(
+                v.values() for v in self.outputs.values()))


 def get_host_list(service_names):

View File

@@ -69,18 +69,18 @@ class PKIUtility(object):
             raise exceptions.PKICertificateInvalidDuration()

         if not self._ca_config_string:
-            self._ca_config_string = json.dumps({
-                'signing': {
-                    'default': {
-                        'expiry':
-                        str(24 * self.duration) + 'h',
-                        'usages': [
-                            'signing', 'key encipherment', 'server auth',
-                            'client auth'
-                        ],
-                    },
-                },
-            })
+            self._ca_config_string = json.dumps(
+                {
+                    'signing': {
+                        'default': {
+                            'expiry': str(24 * self.duration) + 'h',
+                            'usages': [
+                                'signing', 'key encipherment', 'server auth',
+                                'client auth'
+                            ],
+                        },
+                    },
+                })
         return self._ca_config_string

     def generate_ca(self, ca_name):
@@ -92,11 +92,13 @@
         """

-        result = self._cfssl(['gencert', '-initca', 'csr.json'],
-                             files={'csr.json': self.csr(name=ca_name)})
+        result = self._cfssl(
+            ['gencert', '-initca', 'csr.json'],
+            files={'csr.json': self.csr(name=ca_name)})

-        return (self._wrap_ca(ca_name, result['cert']),
-                self._wrap_ca_key(ca_name, result['key']))
+        return (
+            self._wrap_ca(ca_name, result['cert']),
+            self._wrap_ca_key(ca_name, result['key']))

     def generate_keypair(self, name):
         """Generate keypair.
@@ -114,17 +116,12 @@
                 'priv.pem': priv_result['priv.pem'],
             })

-        return (self._wrap_pub_key(name, pub_result['pub.pem']),
-                self._wrap_priv_key(name, priv_result['priv.pem']))
+        return (
+            self._wrap_pub_key(name, pub_result['pub.pem']),
+            self._wrap_priv_key(name, priv_result['priv.pem']))

-    def generate_certificate(self,
-                             name,
-                             *,
-                             ca_cert,
-                             ca_key,
-                             cn,
-                             groups=None,
-                             hosts=None):
+    def generate_certificate(
+            self, name, *, ca_cert, ca_key, cn, groups=None, hosts=None):
         """Generate certificate and associated key given CA cert and key.

         :param str name: Name of certificate in wrapped document.
@@ -155,10 +152,12 @@
                 'csr.json': self.csr(name=cn, groups=groups, hosts=hosts),
             })

-        return (self._wrap_cert(name, result['cert']),
-                self._wrap_cert_key(name, result['key']))
+        return (
+            self._wrap_cert(name, result['cert']),
+            self._wrap_cert_key(name, result['key']))

-    def csr(self,
+    def csr(
+            self,
             *,
             name,
             groups=None,
@@ -172,14 +171,15 @@
         if hosts is None:
             hosts = []

-        return json.dumps({
-            'CN': name,
-            'key': key,
-            'hosts': hosts,
-            'names': [{
-                'O': g
-            } for g in groups],
-        })
+        return json.dumps(
+            {
+                'CN': name,
+                'key': key,
+                'hosts': hosts,
+                'names': [{
+                    'O': g
+                } for g in groups],
+            })

     def cert_info(self, cert):
         """Retrieve certificate info via ``cfssl``.
@@ -190,8 +190,8 @@
         """

-        return self._cfssl(['certinfo', '-cert', 'cert.pem'],
-                           files={'cert.pem': cert})
+        return self._cfssl(
+            ['certinfo', '-cert', 'cert.pem'], files={'cert.pem': cert})

     def check_expiry(self, cert):
         """Chek whether a given certificate is expired.
@@ -223,8 +223,8 @@
             files = {}
         with tempfile.TemporaryDirectory() as tmp:
             for filename, data in files.items():
-                util.files.write(decode_bytes(data),
-                                 os.path.join(tmp, filename))
+                util.files.write(
+                    decode_bytes(data), os.path.join(tmp, filename))

             # Ignore bandit false positive:
             # B603:subprocess_without_shell_equals_true
@@ -241,8 +241,8 @@
         with tempfile.TemporaryDirectory() as tmp:
             for filename, data in files.items():
-                util.files.write(decode_bytes(data),
-                                 os.path.join(tmp, filename))
+                util.files.write(
+                    decode_bytes(data), os.path.join(tmp, filename))

             # Ignore bandit false positive:
             # B603:subprocess_without_shell_equals_true
@@ -261,40 +261,46 @@
         return result

     def _wrap_ca(self, name, data):
-        return self.wrap_document(kind='CertificateAuthority',
-                                  name=name,
-                                  data=data,
-                                  block_strings=self.block_strings)
+        return self.wrap_document(
+            kind='CertificateAuthority',
+            name=name,
+            data=data,
+            block_strings=self.block_strings)

     def _wrap_ca_key(self, name, data):
-        return self.wrap_document(kind='CertificateAuthorityKey',
-                                  name=name,
-                                  data=data,
-                                  block_strings=self.block_strings)
+        return self.wrap_document(
+            kind='CertificateAuthorityKey',
+            name=name,
+            data=data,
+            block_strings=self.block_strings)

     def _wrap_cert(self, name, data):
-        return self.wrap_document(kind='Certificate',
-                                  name=name,
-                                  data=data,
-                                  block_strings=self.block_strings)
+        return self.wrap_document(
+            kind='Certificate',
+            name=name,
+            data=data,
+            block_strings=self.block_strings)

     def _wrap_cert_key(self, name, data):
-        return self.wrap_document(kind='CertificateKey',
-                                  name=name,
-                                  data=data,
-                                  block_strings=self.block_strings)
+        return self.wrap_document(
+            kind='CertificateKey',
+            name=name,
+            data=data,
+            block_strings=self.block_strings)

     def _wrap_priv_key(self, name, data):
-        return self.wrap_document(kind='PrivateKey',
-                                  name=name,
-                                  data=data,
-                                  block_strings=self.block_strings)
+        return self.wrap_document(
+            kind='PrivateKey',
+            name=name,
+            data=data,
+            block_strings=self.block_strings)

     def _wrap_pub_key(self, name, data):
-        return self.wrap_document(kind='PublicKey',
-                                  name=name,
-                                  data=data,
-                                  block_strings=self.block_strings)
+        return self.wrap_document(
+            kind='PublicKey',
+            name=name,
+            data=data,
+            block_strings=self.block_strings)

     @staticmethod
     def wrap_document(kind, name, data, block_strings=True):
@@ -319,8 +325,8 @@
             },
             'storagePolicy': 'cleartext'
         }
-        wrapped_data = PKIUtility._block_literal(data,
-                                                 block_strings=block_strings)
+        wrapped_data = PKIUtility._block_literal(
+            data, block_strings=block_strings)

         document = {
             "schema": wrapped_schema,

View File

@@ -50,8 +50,9 @@ class BaseCatalog(ABC):
             if schema == 'pegleg/%s/v1' % kind:
                 self._catalog_docs.append(document)
             elif schema == 'promenade/%s/v1' % kind:
-                LOG.warning('The schema promenade/%s/v1 is deprecated. Use '
-                            'pegleg/%s/v1 instead.', kind, kind)
+                LOG.warning(
+                    'The schema promenade/%s/v1 is deprecated. Use '
+                    'pegleg/%s/v1 instead.', kind, kind)
                 self._catalog_docs.append(document)

     @property
@@ -73,8 +74,9 @@
         if not self._catalog_path:
             # Cound not find the Catalog for this generated passphrase
             # raise an exception.
-            LOG.error('Catalog path: {} was not found in repo: {}'.format(
-                catalog_name, repo_name))
+            LOG.error(
+                'Catalog path: {} was not found in repo: {}'.format(
+                    catalog_name, repo_name))
             raise PassphraseCatalogNotFoundException()

     def _get_document_name(self, name):

View File

@@ -57,9 +57,9 @@ class PassphraseCatalog(BaseCatalog):
     @property
     def get_passphrase_names(self):
         """Return the list of passphrases in the catalog."""
-        return (passphrase[P_DOCUMENT_NAME]
-                for catalog in self._catalog_docs
-                for passphrase in catalog['data']['passphrases'])
+        return (
+            passphrase[P_DOCUMENT_NAME] for catalog in self._catalog_docs
+            for passphrase in catalog['data']['passphrases'])

     def get_length(self, passphrase_name):
         """

View File

@@ -14,9 +14,10 @@
 import logging

-__all__ = ('PeglegBaseException', 'GitException', 'GitAuthException',
-           'GitProxyException', 'GitSSHException', 'GitConfigException',
-           'GitInvalidRepoException')
+__all__ = (
+    'PeglegBaseException', 'GitException', 'GitAuthException',
+    'GitProxyException', 'GitSSHException', 'GitConfigException',
+    'GitInvalidRepoException')

 LOG = logging.getLogger(__name__)
@@ -37,14 +38,16 @@ class PeglegBaseException(Exception):

 class GitException(PeglegBaseException):
     """Exception when an error occurs cloning a Git repository."""
-    message = ('Git exception occurred: [%(location)s] may not be a valid '
-               'git repository. Details: %(details)s')
+    message = (
+        'Git exception occurred: [%(location)s] may not be a valid '
+        'git repository. Details: %(details)s')


 class GitAuthException(PeglegBaseException):
     """Exception that occurs when authentication fails for cloning a repo."""
-    message = ('Failed to authenticate for repo %(repo_url)s with ssh-key '
-               'at path %(ssh_key_path)s')
+    message = (
+        'Failed to authenticate for repo %(repo_url)s with ssh-key '
+        'at path %(ssh_key_path)s')


 class GitProxyException(PeglegBaseException):
@@ -84,8 +87,9 @@ class IncompletePKIPairError(PeglegBaseException):

 class PassphraseCatalogNotFoundException(PeglegBaseException):
     """Failed to find Catalog for Passphrases generation."""
-    message = ('Could not find the Passphrase Catalog to generate '
-               'the site Passphrases!')
+    message = (
+        'Could not find the Passphrase Catalog to generate '
+        'the site Passphrases!')


 class GenesisBundleEncryptionException(PeglegBaseException):
@@ -106,8 +110,9 @@ class GenesisBundleGenerateException(PeglegBaseException):

 class PKICertificateInvalidDuration(PeglegBaseException):
     """Exception for invalid duration of PKI Certificate."""
-    message = ('Provided duration is invalid. Certificate durations must be '
-               'a positive integer.')
+    message = (
+        'Provided duration is invalid. Certificate durations must be '
+        'a positive integer.')


 #
@@ -142,8 +147,9 @@ class SaltInsufficientLengthException(PeglegBaseException):

 class GlobalCredentialsNotFound(PeglegBaseException):
     """Exception raised when global_passphrase or global_salt are not found."""
-    message = ('global_salt and global_passphrase must either both be '
-               'defined, or neither can be defined in site documents.')
+    message = (
+        'global_salt and global_passphrase must either both be '
+        'defined, or neither can be defined in site documents.')


 #

View File

@@ -71,9 +71,7 @@ class BaseGenerator(ABC):

     def get_save_path(self, passphrase_name):
         """Calculate and return the save path of the ``passphrase_name``."""
-        return os.path.abspath(os.path.join(self._save_location,
-                                            'site',
-                                            self._sitename,
-                                            'secrets',
-                                            self.kind_path,
-                                            '{}.yaml'.format(passphrase_name)))
+        return os.path.abspath(
+            os.path.join(
+                self._save_location, 'site', self._sitename, 'secrets',
+                self.kind_path, '{}.yaml'.format(passphrase_name)))

View File

@@ -46,10 +46,10 @@ class PassphraseGenerator(BaseGenerator):
         certificates.
         """

-        super(PassphraseGenerator, self).__init__(sitename, save_location,
-                                                  author)
-        self._catalog = PassphraseCatalog(self._sitename,
-                                          documents=self._documents)
+        super(PassphraseGenerator,
+              self).__init__(sitename, save_location, author)
+        self._catalog = PassphraseCatalog(
+            self._sitename, documents=self._documents)
         self._pass_util = CryptoString()

     def generate(self, interactive=False, force_cleartext=False):
@@ -81,8 +81,9 @@
             docs = list()
             if force_cleartext:
                 storage_policy = passphrase_catalog.P_CLEARTEXT
-                LOG.warning("Passphrases for {} will be "
-                            "generated in clear text.".format(p_name))
+                LOG.warning(
+                    "Passphrases for {} will be "
+                    "generated in clear text.".format(p_name))
             else:
                 storage_policy = self._catalog.get_storage_policy(p_name)

View File

@@ -12,13 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import click
 import logging
 import os
-import pkg_resources
 import shutil
 import textwrap

+import click
+import pkg_resources
 from prettytable import PrettyTable

 from pegleg import config
@@ -84,10 +84,11 @@ def full(fail_on_missing_sub_src=False, exclude_lint=None, warn_lint=None):
         messages=messages, exclude_lint=exclude_lint, warn_lint=warn_lint)


-def site(site_name,
-         fail_on_missing_sub_src=False,
-         exclude_lint=None,
-         warn_lint=None):
+def site(
+        site_name,
+        fail_on_missing_sub_src=False,
+        exclude_lint=None,
+        warn_lint=None):
     """Lint ``site_name``.

     :param str site_name: Name of site to lint.
@@ -133,10 +134,8 @@ def site(site_name,
         messages=messages, exclude_lint=exclude_lint, warn_lint=warn_lint)


-def _filter_messages_by_warn_and_error_lint(*,
-                                            messages=None,
-                                            exclude_lint=None,
-                                            warn_lint=None):
+def _filter_messages_by_warn_and_error_lint(
+        *, messages=None, exclude_lint=None, warn_lint=None):
     """Helper that only filters messages depending on whether or not they
     are present in ``exclude_lint`` or ``warn_lint``.
@@ -171,8 +170,8 @@ def _filter_messages_by_warn_and_error_lint(*,
     if errors:
         raise click.ClickException(
-            'Linting failed:\n' + errors_table.get_string() +
-            '\nLinting warnings:\n' + warnings_table.get_string())
+            'Linting failed:\n' + errors_table.get_string()
+            + '\nLinting warnings:\n' + warnings_table.get_string())

     return warns
@@ -189,14 +188,18 @@ def _verify_no_unexpected_files(*, sitenames=None):
     errors = []
     for unused_dir in sorted(found_directories - expected_directories):
-        errors.append((REPOS_MISSING_DIRECTORIES_FLAG,
-                       '%s exists, but is unused' % unused_dir))
+        errors.append(
+            (
+                REPOS_MISSING_DIRECTORIES_FLAG,
+                '%s exists, but is unused' % unused_dir))

     for missing_dir in sorted(expected_directories - found_directories):
         if not missing_dir.endswith('common'):
             errors.append(
-                (REPOS_MISSING_DIRECTORIES_FLAG,
-                 '%s was not found, but expected by manifest' % missing_dir))
+                (
+                    REPOS_MISSING_DIRECTORIES_FLAG,
+                    '%s was not found, but expected by manifest'
+                    % missing_dir))

     return errors
@@ -219,16 +222,20 @@ def _verify_single_file(filename, schemas):
     LOG.debug("Validating file %s.", filename)
     with open(filename, 'r') as f:
         if not f.read(4) == '---\n':
-            errors.append((FILE_MISSING_YAML_DOCUMENT_HEADER,
-                           '%s does not begin with YAML beginning of document '
-                           'marker "---".' % filename))
+            errors.append(
+                (
+                    FILE_MISSING_YAML_DOCUMENT_HEADER,
+                    '%s does not begin with YAML beginning of document '
+                    'marker "---".' % filename))

     documents = []
     try:
         documents = util.files.read(filename)
     except Exception as e:
-        errors.append((FILE_CONTAINS_INVALID_YAML,
-                       '%s is not valid yaml: %s' % (filename, e)))
+        errors.append(
+            (
+                FILE_CONTAINS_INVALID_YAML, '%s is not valid yaml: %s' %
+                (filename, e)))

     for document in documents:
         errors.extend(_verify_document(document, schemas, filename))
@@ -245,18 +252,20 @@ MANDATORY_ENCRYPTED_TYPES = {

 def _verify_document(document, schemas, filename):
-    name = ':'.join([
-        document.get('schema', ''),
-        document.get('metadata', {}).get('name', '')
-    ])
+    name = ':'.join(
+        [
+            document.get('schema', ''),
+            document.get('metadata', {}).get('name', '')
+        ])
     errors = []

     layer = _layer(document)
     if layer is not None and layer != _expected_layer(filename):
         errors.append(
-            (DOCUMENT_LAYER_MISMATCH,
-             '%s (document %s) had unexpected layer "%s", expected "%s"' %
-             (filename, name, layer, _expected_layer(filename))))
+            (
+                DOCUMENT_LAYER_MISMATCH,
+                '%s (document %s) had unexpected layer "%s", expected "%s"' %
+                (filename, name, layer, _expected_layer(filename))))

     # secrets must live in the appropriate directory, and must be
     # "storagePolicy: encrypted".
@@ -264,16 +273,19 @@ def _verify_document(document, schemas, filename):
         storage_policy = document.get('metadata', {}).get('storagePolicy')

         if (storage_policy != 'encrypted'):
-            errors.append((SCHEMA_STORAGE_POLICY_MISMATCH_FLAG,
-                           '%s (document %s) is a secret, but has unexpected '
-                           'storagePolicy: "%s"' % (filename, name,
-                                                    storage_policy)))
+            errors.append(
+                (
+                    SCHEMA_STORAGE_POLICY_MISMATCH_FLAG,
+                    '%s (document %s) is a secret, but has unexpected '
+                    'storagePolicy: "%s"' % (filename, name, storage_policy)))

         # Check if the file is in a secrets directory
         if not util.files.file_in_subdir(filename, 'secrets'):
-            errors.append((SECRET_NOT_ENCRYPTED_POLICY,
-                           '%s (document %s) is a secret, is not stored in a'
-                           ' secrets path' % (filename, name)))
+            errors.append(
+                (
+                    SECRET_NOT_ENCRYPTED_POLICY,
+                    '%s (document %s) is a secret, is not stored in a'
+                    ' secrets path' % (filename, name)))

     return errors
@@ -303,8 +315,10 @@ def _verify_deckhand_render(*, sitename=None, fail_on_missing_sub_src=False):
     all_errors = []

     if sitename:
-        documents_to_render = [_handle_managed_document(doc) for doc in
-                               util.definition.documents_for_site(sitename)]
+        documents_to_render = [
+            _handle_managed_document(doc)
+            for doc in util.definition.documents_for_site(sitename)
+        ]

         LOG.debug('Rendering documents for site: %s.', sitename)
@@ -312,23 +326,26 @@
         _, errors = util.deckhand.deckhand_render(
             fail_on_missing_sub_src=fail_on_missing_sub_src,
             validate=True,
         )
-        LOG.debug('Generated %d rendering errors for site: %s.', len(errors),
-                  sitename)
+        LOG.debug(
+            'Generated %d rendering errors for site: %s.', len(errors),
+            sitename)
         all_errors.extend(errors)
     else:
         documents_to_render = util.definition.documents_for_each_site()

         for site_name, documents in documents_to_render.items():
-            clean_documents = [_handle_managed_document(doc) for doc
-                               in documents]
+            clean_documents = [
+                _handle_managed_document(doc) for doc in documents
+            ]

             LOG.debug('Rendering documents for site: %s.', site_name)
             _, errors = util.deckhand.deckhand_render(
                 documents=clean_documents,
                 fail_on_missing_sub_src=fail_on_missing_sub_src,
                 validate=True,
             )
-            LOG.debug('Generated %d rendering errors for site: %s.',
-                      len(errors), site_name)
+            LOG.debug(
+                'Generated %d rendering errors for site: %s.', len(errors),
+                site_name)
             all_errors.extend(errors)

     return list(set(all_errors))

View File

@ -28,8 +28,9 @@ from pegleg.engine import util
__all__ = ('process_repositories', 'process_site_repository') __all__ = ('process_repositories', 'process_site_repository')
__REPO_FOLDERS = {} __REPO_FOLDERS = {}
_INVALID_FORMAT_MSG = ("The repository %s must be in the form of " _INVALID_FORMAT_MSG = (
"name=repoUrl[@revision]") "The repository %s must be in the form of "
"name=repoUrl[@revision]")
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
@ -64,8 +65,9 @@ def process_repositories(site_name, overwrite_existing=False):
# Dict mapping repository names to associated URL/revision info for clone. # Dict mapping repository names to associated URL/revision info for clone.
repo_overrides = _process_repository_overrides(site_def_repos) repo_overrides = _process_repository_overrides(site_def_repos)
if not site_def_repos: if not site_def_repos:
LOG.info('No repositories found in site-definition.yaml for site: %s. ' LOG.info(
'Defaulting to specified repository overrides.', site_name) 'No repositories found in site-definition.yaml for site: %s. '
'Defaulting to specified repository overrides.', site_name)
site_def_repos = repo_overrides site_def_repos = repo_overrides
# Extract user/key that we will use for all repositories. # Extract user/key that we will use for all repositories.
@ -74,10 +76,10 @@ def process_repositories(site_name, overwrite_existing=False):
for repo_alias in site_def_repos.keys(): for repo_alias in site_def_repos.keys():
if repo_alias == "site": if repo_alias == "site":
LOG.warning("The primary site repository path must be specified " LOG.warning(
"via the -r flag. Ignoring the provided " "The primary site repository path must be specified "
"site-definition entry: %s", "via the -r flag. Ignoring the provided "
site_def_repos[repo_alias]) "site-definition entry: %s", site_def_repos[repo_alias])
continue continue
# Extract URL and revision, prioritizing overrides over the defaults in # Extract URL and revision, prioritizing overrides over the defaults in
@ -91,19 +93,22 @@ def process_repositories(site_name, overwrite_existing=False):
repo_url_or_path = _format_url_with_repo_username(repo_url_or_path) repo_url_or_path = _format_url_with_repo_username(repo_url_or_path)
LOG.info("Processing repository %s with url=%s, repo_key=%s, " LOG.info(
"repo_username=%s, revision=%s", repo_alias, repo_url_or_path, "Processing repository %s with url=%s, repo_key=%s, "
repo_key, repo_user, repo_revision) "repo_username=%s, revision=%s", repo_alias, repo_url_or_path,
repo_key, repo_user, repo_revision)
temp_extra_repo = _process_repository( temp_extra_repo = _process_repository(
repo_url_or_path, repo_revision, repo_url_or_path,
repo_revision,
overwrite_existing=overwrite_existing) overwrite_existing=overwrite_existing)
extra_repos.append(temp_extra_repo) extra_repos.append(temp_extra_repo)
# Overwrite the site repo and extra repos in the config because further # Overwrite the site repo and extra repos in the config because further
# processing will fail if they contain revision info in their paths. # processing will fail if they contain revision info in their paths.
LOG.debug("Updating site_repo=%s extra_repo_list=%s in config", site_repo, LOG.debug(
extra_repos) "Updating site_repo=%s extra_repo_list=%s in config", site_repo,
extra_repos)
config.set_site_repo(site_repo) config.set_site_repo(site_repo)
config.set_extra_repo_list(extra_repos) config.set_extra_repo_list(extra_repos)
@ -121,15 +126,16 @@ def process_site_repository(update_config=False, overwrite_existing=False):
# Retrieve the main site repository and validate it. # Retrieve the main site repository and validate it.
site_repo_or_path = config.get_site_repo() site_repo_or_path = config.get_site_repo()
if not site_repo_or_path: if not site_repo_or_path:
raise ValueError("Site repository directory (%s) must be specified" % raise ValueError(
site_repo_or_path) "Site repository directory (%s) must be specified"
% site_repo_or_path)
repo_url_or_path, repo_revision = _extract_repo_url_and_revision( repo_url_or_path, repo_revision = _extract_repo_url_and_revision(
site_repo_or_path) site_repo_or_path)
config.set_site_rev(repo_revision) config.set_site_rev(repo_revision)
repo_url_or_path = _format_url_with_repo_username(repo_url_or_path) repo_url_or_path = _format_url_with_repo_username(repo_url_or_path)
new_repo_path = _process_repository(repo_url_or_path, repo_revision, new_repo_path = _process_repository(
overwrite_existing=overwrite_existing) repo_url_or_path, repo_revision, overwrite_existing=overwrite_existing)
if update_config: if update_config:
# Overwrite the site repo in the config because further processing will # Overwrite the site repo in the config because further processing will
@ -140,8 +146,8 @@ def process_site_repository(update_config=False, overwrite_existing=False):
return new_repo_path return new_repo_path
def _process_repository(repo_url_or_path, repo_revision, def _process_repository(
overwrite_existing=False): repo_url_or_path, repo_revision, overwrite_existing=False):
"""Process a repository located at ``repo_url_or_path``. """Process a repository located at ``repo_url_or_path``.
:param str repo_url_or_path: Path to local repo or URL of remote URL. :param str repo_url_or_path: Path to local repo or URL of remote URL.
@ -191,9 +197,10 @@ def _process_site_repository(repo_url_or_path, repo_revision):
repo_key = config.get_repo_key() repo_key = config.get_repo_key()
repo_user = config.get_repo_username() repo_user = config.get_repo_username()
LOG.info("Processing repository %s with url=%s, repo_key=%s, " LOG.info(
"repo_username=%s, revision=%s", repo_alias, repo_url_or_path, "Processing repository %s with url=%s, repo_key=%s, "
repo_key, repo_user, repo_revision) "repo_username=%s, revision=%s", repo_alias, repo_url_or_path,
repo_key, repo_user, repo_revision)
return _handle_repository( return _handle_repository(
repo_url_or_path, ref=repo_revision, auth_key=repo_key) repo_url_or_path, ref=repo_revision, auth_key=repo_key)
@ -201,10 +208,11 @@ def _process_site_repository(repo_url_or_path, repo_revision):
def _get_and_validate_site_repositories(site_name, site_data): def _get_and_validate_site_repositories(site_name, site_data):
"""Validate that repositories entry exists in ``site_data``.""" """Validate that repositories entry exists in ``site_data``."""
if 'repositories' not in site_data: if 'repositories' not in site_data:
LOG.info("The repository for site_name: %s does not contain a " LOG.info(
"site-definition.yaml with a 'repositories' key. Ensure " "The repository for site_name: %s does not contain a "
"your repository is self-contained and doesn't require " "site-definition.yaml with a 'repositories' key. Ensure "
"extra repositories for correct rendering.", site_name) "your repository is self-contained and doesn't require "
"extra repositories for correct rendering.", site_name)
return site_data.get('repositories', {}) return site_data.get('repositories', {})
@ -249,9 +257,10 @@ def _process_repository_overrides(site_def_repos):
raise click.ClickException(_INVALID_FORMAT_MSG % repo_override) raise click.ClickException(_INVALID_FORMAT_MSG % repo_override)
if repo_alias == "site": if repo_alias == "site":
LOG.warning("The primary site repository path must be specified " LOG.warning(
"via the -r flag. Ignoring the provided override: %s", "The primary site repository path must be specified "
repo_override) "via the -r flag. Ignoring the provided override: %s",
repo_override)
continue continue
if repo_alias not in site_def_repos: if repo_alias not in site_def_repos:
@ -259,9 +268,10 @@ def _process_repository_overrides(site_def_repos):
# site-definition.yaml make a note of it in case the override # site-definition.yaml make a note of it in case the override
# is something bogus, but we won't make this a hard requirement, # is something bogus, but we won't make this a hard requirement,
# so just log the discrepancy. # so just log the discrepancy.
LOG.debug("Repo override: %s not found under `repositories` for " LOG.debug(
"site-definition.yaml. Site def repositories: %s", "Repo override: %s not found under `repositories` for "
repo_override, ", ".join(site_def_repos.keys())) "site-definition.yaml. Site def repositories: %s",
repo_override, ", ".join(site_def_repos.keys()))
repo_url, revision = _extract_repo_url_and_revision(repo_url_or_path) repo_url, revision = _extract_repo_url_and_revision(repo_url_or_path)
@ -286,7 +296,7 @@ def _extract_repo_url_and_revision(repo_url_or_path):
""" """
ssh_username_pattern = re.compile('ssh:\/\/.+@.+\/.+') ssh_username_pattern = re.compile(r'ssh:\/\/.+@.+\/.+')
def has_revision(repo_url_or_path): def has_revision(repo_url_or_path):
if repo_url_or_path.lower().startswith('ssh'): if repo_url_or_path.lower().startswith('ssh'):
@ -348,7 +358,8 @@ def _handle_repository(repo_url_or_path, *args, **kwargs):
except exceptions.GitException as e: except exceptions.GitException as e:
raise click.ClickException(e) raise click.ClickException(e)
except Exception as e: except Exception as e:
LOG.exception('Unknown exception was raised during git clone/checkout:' LOG.exception(
' %s', e) 'Unknown exception was raised during git clone/checkout:'
' %s', e)
# TODO(felipemonteiro): Use internal exceptions for this. # TODO(felipemonteiro): Use internal exceptions for this.
raise click.ClickException(e) raise click.ClickException(e)
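The edits above are mechanical: every call that overflows the column limit now breaks directly after the opening parenthesis instead of hanging continuation lines off the first argument. A rough sketch of reproducing that reflow with yapf's Python API, assuming the repository keeps its style in the conventional .style.yapf file in the working directory (illustrative only):

from yapf.yapflib.yapf_api import FormatCode

snippet = (
    "LOG.info('Processing repository %s with url=%s, repo_key=%s, "
    "revision=%s', repo_alias, repo_url_or_path, repo_key, repo_revision)\n")

# FormatCode returns the reformatted source plus a flag telling whether
# anything changed; with the new knobs the call opens with "LOG.info(".
formatted, changed = FormatCode(snippet, style_config='.style.yapf')
print(formatted)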


@ -61,11 +61,10 @@ def encrypt(save_location, author, site_name):
secrets_found = False secrets_found = False
for repo_base, file_path in definition.site_files_by_repo(site_name): for repo_base, file_path in definition.site_files_by_repo(site_name):
secrets_found = True secrets_found = True
PeglegSecretManagement(file_path=file_path, PeglegSecretManagement(
author=author, file_path=file_path, author=author,
site_name=site_name).encrypt_secrets( site_name=site_name).encrypt_secrets(
_get_dest_path(repo_base, file_path, _get_dest_path(repo_base, file_path, save_location))
save_location))
if secrets_found: if secrets_found:
LOG.info('Encryption of all secret files was completed.') LOG.info('Encryption of all secret files was completed.')
else: else:
@ -91,8 +90,9 @@ def decrypt(path, site_name=None):
file_dict = {} file_dict = {}
if not os.path.exists(path): if not os.path.exists(path):
LOG.error('Path: {} was not found. Check your path and site name, ' LOG.error(
'and try again.'.format(path)) 'Path: {} was not found. Check your path and site name, '
'and try again.'.format(path))
return file_dict return file_dict
if os.path.isfile(path): if os.path.isfile(path):
@ -136,11 +136,9 @@ def _get_dest_path(repo_base, file_path, save_location):
return file_path return file_path
def generate_passphrases(site_name, def generate_passphrases(
save_location, site_name, save_location, author, interactive=False,
author, force_cleartext=False):
interactive=False,
force_cleartext=False):
""" """
Look for the site passphrase catalogs, and for every passphrase entry in Look for the site passphrase catalogs, and for every passphrase entry in
the passphrase catalog generate a passphrase document, wrap the the passphrase catalog generate a passphrase document, wrap the
@ -154,9 +152,8 @@ def generate_passphrases(site_name,
:param bool force_cleartext: Whether to generate results in clear text :param bool force_cleartext: Whether to generate results in clear text
""" """
PassphraseGenerator(site_name, save_location, PassphraseGenerator(site_name, save_location, author).generate(
author).generate(interactive=interactive, interactive=interactive, force_cleartext=force_cleartext)
force_cleartext=force_cleartext)
def generate_crypto_string(length): def generate_crypto_string(length):
@ -170,14 +167,15 @@ def generate_crypto_string(length):
return CryptoString().get_crypto_string(length) return CryptoString().get_crypto_string(length)
def wrap_secret(author, def wrap_secret(
filename, author,
output_path, filename,
schema, output_path,
name, schema,
layer, name,
encrypt, layer,
site_name=None): encrypt,
site_name=None):
"""Wrap a bare secrets file in a YAML and ManagedDocument. """Wrap a bare secrets file in a YAML and ManagedDocument.
:param author: author for ManagedDocument :param author: author for ManagedDocument
@ -210,9 +208,8 @@ def wrap_secret(author,
} }
managed_secret = PeglegManagedSecret(inner_doc, author=author) managed_secret = PeglegManagedSecret(inner_doc, author=author)
if encrypt: if encrypt:
psm = PeglegSecretManagement(docs=[inner_doc], psm = PeglegSecretManagement(
author=author, docs=[inner_doc], author=author, site_name=site_name)
site_name=site_name)
output_doc = psm.get_encrypted_secrets()[0][0] output_doc = psm.get_encrypted_secrets()[0][0]
else: else:
output_doc = managed_secret.pegleg_document output_doc = managed_secret.pegleg_document
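For orientation, a hypothetical invocation of the reshaped helpers above; the output path and site name are placeholders, and the calls assume the site repository and encryption keys have already been configured:

from pegleg.engine import secrets

# Encrypt every secret file found for site 'cicd' under /tmp/out.
secrets.encrypt('/tmp/out', 'pytest', 'cicd')

# Generate passphrase documents from the site's passphrase catalogs.
secrets.generate_passphrases(
    'cicd', '/tmp/out', 'pytest', interactive=False, force_cleartext=False)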


@ -17,11 +17,10 @@ import os
import click import click
import git import git
from prettytable import PrettyTable
import yaml import yaml
from yaml.constructor import SafeConstructor from yaml.constructor import SafeConstructor
from prettytable import PrettyTable
from pegleg import config from pegleg import config
from pegleg.engine import util from pegleg.engine import util
from pegleg.engine.util import files from pegleg.engine.util import files
@ -51,10 +50,11 @@ def _collect_to_stdout(site_name):
for line in _read_and_format_yaml(filename): for line in _read_and_format_yaml(filename):
# This code is a pattern to convert \r\n to \n. # This code is a pattern to convert \r\n to \n.
click.echo("\n".join(line.splitlines())) click.echo("\n".join(line.splitlines()))
res = yaml.safe_dump(_get_deployment_data_doc(), res = yaml.safe_dump(
explicit_start=True, _get_deployment_data_doc(),
explicit_end=True, explicit_start=True,
default_flow_style=False) explicit_end=True,
default_flow_style=False)
# Click isn't splitting these lines correctly, so do it manually # Click isn't splitting these lines correctly, so do it manually
for line in res.split('\n'): for line in res.split('\n'):
click.echo(line) click.echo(line)
@ -82,10 +82,11 @@ def _collect_to_file(site_name, save_location):
LOG.debug("Collecting file %s to file %s", filename, save_file) LOG.debug("Collecting file %s to file %s", filename, save_file)
save_files[repo_name].writelines(_read_and_format_yaml(filename)) save_files[repo_name].writelines(_read_and_format_yaml(filename))
save_files[curr_site_repo].writelines( save_files[curr_site_repo].writelines(
yaml.safe_dump(_get_deployment_data_doc(), yaml.safe_dump(
default_flow_style=False, _get_deployment_data_doc(),
explicit_start=True, default_flow_style=False,
explicit_end=True)) explicit_start=True,
explicit_end=True))
except Exception as ex: except Exception as ex:
raise click.ClickException("Error saving output: %s" % str(ex)) raise click.ClickException("Error saving output: %s" % str(ex))
finally: finally:
@ -121,16 +122,19 @@ def render(site_name, output_stream, validate):
raise click.ClickException(err_msg) raise click.ClickException(err_msg)
if output_stream: if output_stream:
files.dump_all(rendered_documents, files.dump_all(
output_stream, rendered_documents,
default_flow_style=False, output_stream,
explicit_start=True, default_flow_style=False,
explicit_end=True) explicit_start=True,
explicit_end=True)
else: else:
click.echo(yaml.dump_all(rendered_documents, click.echo(
default_flow_style=False, yaml.dump_all(
explicit_start=True, rendered_documents,
explicit_end=True)) default_flow_style=False,
explicit_start=True,
explicit_end=True))
def list_(output_stream): def list_(output_stream):


@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
"""Utility functions for catalog files such as pki-catalog.yaml.""" """Utility functions for catalog files such as pki-catalog.yaml."""
import logging import logging
@ -63,6 +64,7 @@ def iterate(kind, sitename=None, documents=None):
if schema == 'pegleg/%s/v1' % kind: if schema == 'pegleg/%s/v1' % kind:
yield document yield document
elif schema == 'promenade/%s/v1' % kind: elif schema == 'promenade/%s/v1' % kind:
LOG.warning('The schema promenade/%s/v1 is deprecated. Use ' LOG.warning(
'pegleg/%s/v1 instead.', kind, kind) 'The schema promenade/%s/v1 is deprecated. Use '
'pegleg/%s/v1 instead.', kind, kind)
yield document yield document


@ -19,7 +19,6 @@ __all__ = ['CryptoString']
class CryptoString(object): class CryptoString(object):
def __init__(self): def __init__(self):
punctuation = '@#&-+=?' punctuation = '@#&-+=?'
self._pool = string.ascii_letters + string.digits + punctuation self._pool = string.ascii_letters + string.digits + punctuation
@ -96,8 +95,9 @@ class CryptoString(object):
""" """
while True: while True:
crypto_str = ''.join(self._random.choice(self._pool) crypto_str = ''.join(
for _ in range(max(24, length))) self._random.choice(self._pool)
for _ in range(max(24, length)))
if self.validate_crypto_str(crypto_str): if self.validate_crypto_str(crypto_str):
break break
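A small usage sketch of the class above; the import path is assumed from the package layout:

from pegleg.engine.util.cryptostring import CryptoString

cs = CryptoString()
passphrase = cs.get_crypto_string(24)  # lengths under 24 are padded up to 24
assert cs.validate_crypto_str(passphrase)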


@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from deckhand.engine import document_validation from deckhand.engine import document_validation
from deckhand.engine import layering from deckhand.engine import layering
from deckhand import errors as dh_errors from deckhand import errors as dh_errors
@ -34,17 +33,18 @@ def load_schemas_from_docs(documents):
if document.get('schema', '') == SCHEMA_SCHEMA: if document.get('schema', '') == SCHEMA_SCHEMA:
name = document['metadata']['name'] name = document['metadata']['name']
if name in schema_set: if name in schema_set:
errors.append((DECKHAND_DUPLICATE_SCHEMA, errors.append(
'Duplicate schema specified for: %s' % name)) (
DECKHAND_DUPLICATE_SCHEMA,
'Duplicate schema specified for: %s' % name))
schema_set[name] = document['data'] schema_set[name] = document['data']
return schema_set, errors return schema_set, errors
def deckhand_render(documents=None, def deckhand_render(
fail_on_missing_sub_src=False, documents=None, fail_on_missing_sub_src=False, validate=True):
validate=True):
documents = documents or [] documents = documents or []
errors = [] errors = []
rendered_documents = [] rendered_documents = []
@ -65,15 +65,17 @@ def deckhand_render(documents=None,
for result in results: for result in results:
if result['errors']: if result['errors']:
errors.append( errors.append(
(DECKHAND_RENDER_EXCEPTION, (
'During rendering Deckhand was unable to validate ' DECKHAND_RENDER_EXCEPTION,
'the following document, details: %s.' % ( 'During rendering Deckhand was unable to validate '
result['errors']))) 'the following document, details: %s.' %
(result['errors'])))
except dh_errors.DeckhandException as e: except dh_errors.DeckhandException as e:
errors.append( errors.append(
(DECKHAND_RENDER_EXCEPTION, (
'An unknown Deckhand exception occurred while trying' DECKHAND_RENDER_EXCEPTION,
' to render documents: %s. Details: %s.' % (str(e), 'An unknown Deckhand exception occurred while trying'
e.error_list))) ' to render documents: %s. Details: %s.' %
(str(e), e.error_list)))
return rendered_documents, errors return rendered_documents, errors
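A minimal sketch of calling the renderer as reshaped above; site_documents stands in for documents collected elsewhere:

from pegleg.engine.util.deckhand import deckhand_render

site_documents = []  # placeholder: documents gathered by the caller
rendered, errors = deckhand_render(
    documents=site_documents, fail_on_missing_sub_src=False, validate=True)
for code, message in errors:
    print(code, message)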


@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
"""Utility functions for site-definition.yaml files.""" """Utility functions for site-definition.yaml files."""
import os import os
@ -55,8 +56,8 @@ def path(site_name, primary_repo_base=None):
"""Retrieve path to the site-definition.yaml file for ``site_name``.""" """Retrieve path to the site-definition.yaml file for ``site_name``."""
if not primary_repo_base: if not primary_repo_base:
primary_repo_base = config.get_site_repo() primary_repo_base = config.get_site_repo()
return os.path.join(primary_repo_base, 'site', site_name, return os.path.join(
'site-definition.yaml') primary_repo_base, 'site', site_name, 'site-definition.yaml')
def pluck(site_definition, key): def pluck(site_definition, key):
@ -64,8 +65,9 @@ def pluck(site_definition, key):
return site_definition['data'][key] return site_definition['data'][key]
except Exception as e: except Exception as e:
site_name = site_definition.get('metadata', {}).get('name') site_name = site_definition.get('metadata', {}).get('name')
raise click.ClickException('failed to get "%s" from site definition ' raise click.ClickException(
'"%s": %s' % (key, site_name, e)) 'failed to get "%s" from site definition '
'"%s": %s' % (key, site_name, e))
def site_files(site_name): def site_files(site_name):
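For reference, a hypothetical use of the helpers above; the repository path, site name, and loaded document are placeholders:

from pegleg.engine.util import definition

# Resolve <repo>/site/<site>/site-definition.yaml against an explicit root.
site_def_file = definition.path('cicd', primary_repo_base='/tmp/site_repo')

# pluck() reads one key from an already-loaded site-definition document and
# raises click.ClickException with a readable message when it is missing.
site_definition_doc = {
    'metadata': {'name': 'cicd'}, 'data': {'site_type': 'cicd'}}
site_type = definition.pluck(site_definition_doc, 'site_type')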


@ -27,11 +27,12 @@ LOG = logging.getLogger(__name__)
__all__ = ('encrypt', 'decrypt') __all__ = ('encrypt', 'decrypt')
def encrypt(unencrypted_data, def encrypt(
passphrase, unencrypted_data,
salt, passphrase,
key_length=KEY_LENGTH, salt,
iterations=ITERATIONS): key_length=KEY_LENGTH,
iterations=ITERATIONS):
""" """
Encrypt the data, using the provided passphrase and salt, Encrypt the data, using the provided passphrase and salt,
and return the encrypted data. and return the encrypted data.
@ -56,15 +57,17 @@ def encrypt(unencrypted_data,
:rtype: bytes :rtype: bytes
""" """
return fernet.Fernet(_generate_key( return fernet.Fernet(
passphrase, salt, key_length, iterations)).encrypt(unencrypted_data) _generate_key(passphrase, salt, key_length,
iterations)).encrypt(unencrypted_data)
def decrypt(encrypted_data, def decrypt(
passphrase, encrypted_data,
salt, passphrase,
key_length=KEY_LENGTH, salt,
iterations=ITERATIONS): key_length=KEY_LENGTH,
iterations=ITERATIONS):
""" """
Decrypt the data, using the provided passphrase and salt, Decrypt the data, using the provided passphrase and salt,
and return the decrypted data. and return the decrypted data.
@ -92,11 +95,13 @@ def decrypt(encrypted_data,
""" """
try: try:
return fernet.Fernet(_generate_key( return fernet.Fernet(
passphrase, salt, key_length, iterations)).decrypt(encrypted_data) _generate_key(passphrase, salt, key_length,
iterations)).decrypt(encrypted_data)
except fernet.InvalidToken: except fernet.InvalidToken:
LOG.error('Signature verification to decrypt secrets failed. Please ' LOG.error(
'check your provided passphrase and salt and try again.') 'Signature verification to decrypt secrets failed. Please '
'check your provided passphrase and salt and try again.')
raise raise
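The two functions above are symmetric; a minimal round-trip with throwaway byte values standing in for the real passphrase and salt:

from pegleg.engine.util import encryption

secret = b'example secret payload'
passphrase = b'ytrr89erARAiPE34692iwUMvWqqBvC'
salt = b'MySecretSalt1234567890]['

token = encryption.encrypt(secret, passphrase, salt)
assert encryption.decrypt(token, passphrase, salt) == secret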


@ -53,10 +53,11 @@ DIR_DEPTHS = {
def all(): def all():
return search([ return search(
os.path.join(r, k) for r in config.all_repos() [
for k in DIR_DEPTHS.keys() os.path.join(r, k) for r in config.all_repos()
]) for k in DIR_DEPTHS.keys()
])
def create_global_directories(): def create_global_directories():
@ -183,8 +184,8 @@ def list_sites(primary_repo_base=None):
"""Get a list of site definition directories in the primary repo.""" """Get a list of site definition directories in the primary repo."""
if not primary_repo_base: if not primary_repo_base:
primary_repo_base = config.get_site_repo() primary_repo_base = config.get_site_repo()
full_site_path = os.path.join(primary_repo_base, full_site_path = os.path.join(
config.get_rel_site_path()) primary_repo_base, config.get_rel_site_path())
for path in os.listdir(full_site_path): for path in os.listdir(full_site_path):
joined_path = os.path.join(full_site_path, path) joined_path = os.path.join(full_site_path, path)
if os.path.isdir(joined_path): if os.path.isdir(joined_path):
@ -195,8 +196,8 @@ def list_types(primary_repo_base=None):
"""Get a list of type directories in the primary repo.""" """Get a list of type directories in the primary repo."""
if not primary_repo_base: if not primary_repo_base:
primary_repo_base = config.get_site_repo() primary_repo_base = config.get_site_repo()
full_type_path = os.path.join(primary_repo_base, full_type_path = os.path.join(
config.get_rel_type_path()) primary_repo_base, config.get_rel_type_path())
for path in os.listdir(full_type_path): for path in os.listdir(full_type_path):
joined_path = os.path.join(full_type_path, path) joined_path = os.path.join(full_type_path, path)
if os.path.isdir(joined_path): if os.path.isdir(joined_path):
@ -327,17 +328,19 @@ def write(data, file_path):
elif isinstance(data, (dict, collections.abc.Iterable)): elif isinstance(data, (dict, collections.abc.Iterable)):
if isinstance(data, dict): if isinstance(data, dict):
data = [data] data = [data]
yaml.safe_dump_all(data, yaml.safe_dump_all(
stream, data,
explicit_start=True, stream,
explicit_end=True, explicit_start=True,
default_flow_style=False) explicit_end=True,
default_flow_style=False)
else: else:
raise ValueError('data must be str or dict, ' raise ValueError(
'not {}'.format(type(data))) 'data must be str or dict, '
'not {}'.format(type(data)))
except EnvironmentError as e: except EnvironmentError as e:
raise click.ClickError("Couldn't write data to {}: {}".format( raise click.ClickError(
file_path, e)) "Couldn't write data to {}: {}".format(file_path, e))
def _recurse_subdirs(search_path, depth): def _recurse_subdirs(search_path, depth):
@ -349,8 +352,8 @@ def _recurse_subdirs(search_path, depth):
if depth == 1: if depth == 1:
directories.add(joined_path) directories.add(joined_path)
else: else:
directories.update(_recurse_subdirs( directories.update(
joined_path, depth - 1)) _recurse_subdirs(joined_path, depth - 1))
except FileNotFoundError: except FileNotFoundError:
pass pass
return directories return directories
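A short sketch of the rewritten write() contract; the target paths are placeholders:

from pegleg.engine.util import files

# Dicts (or iterables of dicts) are dumped as YAML documents ...
files.write(
    {'schema': 'pegleg/SiteDefinition/v1', 'data': {}}, '/tmp/demo.yaml')

# ... strings are written verbatim, and any other type raises ValueError.
files.write('plain text\n', '/tmp/demo.txt')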


@ -26,15 +26,13 @@ from pegleg.engine import exceptions
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
__all__ = ('git_handler', 'is_repository', 'is_equal', 'repo_url', 'repo_name', __all__ = (
'normalize_repo_path') 'git_handler', 'is_repository', 'is_equal', 'repo_url', 'repo_name',
'normalize_repo_path')
def git_handler(repo_url, def git_handler(
ref=None, repo_url, ref=None, proxy_server=None, auth_key=None, clone_path=None):
proxy_server=None,
auth_key=None,
clone_path=None):
"""Handle directories that are Git repositories. """Handle directories that are Git repositories.
If ``repo_url`` is a valid URL for which a local repository doesn't If ``repo_url`` is a valid URL for which a local repository doesn't
@ -80,12 +78,13 @@ def git_handler(repo_url,
# we need to clone the repo_url first since it doesn't exist and then # we need to clone the repo_url first since it doesn't exist and then
# checkout the appropriate reference - and return the tmpdir # checkout the appropriate reference - and return the tmpdir
if parsed_url.scheme in supported_clone_protocols: if parsed_url.scheme in supported_clone_protocols:
return _try_git_clone(repo_url, ref, proxy_server, auth_key, return _try_git_clone(
clone_path) repo_url, ref, proxy_server, auth_key, clone_path)
else: else:
raise ValueError('repo_url=%s must use one of the following ' raise ValueError(
'protocols: %s' % 'repo_url=%s must use one of the following '
(repo_url, ', '.join(supported_clone_protocols))) 'protocols: %s' %
(repo_url, ', '.join(supported_clone_protocols)))
# otherwise, we're dealing with a local directory so although # otherwise, we're dealing with a local directory so although
# we do not need to clone, we may need to process the reference # we do not need to clone, we may need to process the reference
# by checking that out and returning the directory they passed in # by checking that out and returning the directory they passed in
@ -112,8 +111,9 @@ def git_handler(repo_url,
try: try:
# Check whether the ref exists locally. # Check whether the ref exists locally.
LOG.info('Attempting to checkout ref=%s from repo_url=%s locally', LOG.info(
ref, repo_url) 'Attempting to checkout ref=%s from repo_url=%s locally', ref,
repo_url)
_try_git_checkout(repo, repo_url, ref, fetch=False) _try_git_checkout(repo, repo_url, ref, fetch=False)
except exceptions.GitException: except exceptions.GitException:
# Otherwise, attempt to fetch and checkout the missing ref. # Otherwise, attempt to fetch and checkout the missing ref.
@ -155,11 +155,8 @@ def get_remote_url(repo_url):
return None return None
def _try_git_clone(repo_url, def _try_git_clone(
ref=None, repo_url, ref=None, proxy_server=None, auth_key=None, clone_path=None):
proxy_server=None,
auth_key=None,
clone_path=None):
"""Try cloning Git repo from ``repo_url`` using the reference ``ref``. """Try cloning Git repo from ``repo_url`` using the reference ``ref``.
:param repo_url: URL of remote Git repo or path to local Git repo. :param repo_url: URL of remote Git repo or path to local Git repo.
@ -202,27 +199,28 @@ def _try_git_clone(repo_url,
LOG.debug('Cloning [%s] with proxy [%s]', repo_url, proxy_server) LOG.debug('Cloning [%s] with proxy [%s]', repo_url, proxy_server)
# TODO(felipemonteiro): proxy_server can be finicky. Need a config # TODO(felipemonteiro): proxy_server can be finicky. Need a config
# option to retry up to N times. # option to retry up to N times.
repo = Repo.clone_from(repo_url, repo = Repo.clone_from(
temp_dir, repo_url,
config='http.proxy=%s' % proxy_server, temp_dir,
env=env_vars) config='http.proxy=%s' % proxy_server,
env=env_vars)
else: else:
LOG.debug('Cloning [%s]', repo_url) LOG.debug('Cloning [%s]', repo_url)
repo = Repo.clone_from(repo_url, temp_dir, env=env_vars) repo = Repo.clone_from(repo_url, temp_dir, env=env_vars)
except git_exc.GitCommandError as e: except git_exc.GitCommandError as e:
LOG.exception('Failed to clone repo_url=%s using ref=%s.', repo_url, LOG.exception(
ref) 'Failed to clone repo_url=%s using ref=%s.', repo_url, ref)
if (ssh_cmd and ssh_cmd in e.stderr if (ssh_cmd and ssh_cmd in e.stderr
or 'permission denied' in e.stderr.lower()): or 'permission denied' in e.stderr.lower()):
raise exceptions.GitAuthException(repo_url=repo_url, raise exceptions.GitAuthException(
ssh_key_path=auth_key) repo_url=repo_url, ssh_key_path=auth_key)
elif 'could not resolve proxy' in e.stderr.lower(): elif 'could not resolve proxy' in e.stderr.lower():
raise exceptions.GitProxyException(location=proxy_server) raise exceptions.GitProxyException(location=proxy_server)
else: else:
raise exceptions.GitException(location=repo_url, details=e) raise exceptions.GitException(location=repo_url, details=e)
except Exception as e: except Exception as e:
LOG.exception('Encountered unknown Exception during clone of %s', LOG.exception(
repo_url) 'Encountered unknown Exception during clone of %s', repo_url)
raise exceptions.GitException(location=repo_url, details=e) raise exceptions.GitException(location=repo_url, details=e)
_try_git_checkout(repo=repo, repo_url=repo_url, ref=ref) _try_git_checkout(repo=repo, repo_url=repo_url, ref=ref)
@ -296,32 +294,28 @@ def _try_git_checkout(repo, repo_url, ref=None, fetch=True):
# for each so that future checkouts can be performed using either # for each so that future checkouts can be performed using either
# format. This way, no future processing is required to figure # format. This way, no future processing is required to figure
# out whether a refpath/hexsha exists within the repo. # out whether a refpath/hexsha exists within the repo.
_create_local_ref(g, _create_local_ref(
branches, g, branches, ref=ref, newref=hexsha, reftype='hexsha')
ref=ref, _create_local_ref(
newref=hexsha, g, branches, ref=ref, newref=ref_path, reftype='refpath')
reftype='hexsha')
_create_local_ref(g,
branches,
ref=ref,
newref=ref_path,
reftype='refpath')
_create_or_checkout_local_ref(g, branches, ref=ref) _create_or_checkout_local_ref(g, branches, ref=ref)
else: else:
LOG.debug('Checking out ref=%s from local repo_url=%s', ref, LOG.debug(
repo_url) 'Checking out ref=%s from local repo_url=%s', ref, repo_url)
# Expect the reference to exist if checking out locally. # Expect the reference to exist if checking out locally.
g.checkout(ref) g.checkout(ref)
LOG.debug('Successfully checked out ref=%s for repo_url=%s', ref, LOG.debug(
repo_url) 'Successfully checked out ref=%s for repo_url=%s', ref, repo_url)
except git_exc.GitCommandError as e: except git_exc.GitCommandError as e:
LOG.exception('Failed to checkout ref=%s from repo_url=%s.', ref, LOG.exception(
repo_url) 'Failed to checkout ref=%s from repo_url=%s.', ref, repo_url)
raise exceptions.GitException(location=repo_url, details=e) raise exceptions.GitException(location=repo_url, details=e)
except Exception as e: except Exception as e:
LOG.exception(('Encountered unknown Exception during checkout of ' LOG.exception(
'ref=%s for repo_url=%s'), ref, repo_url) (
'Encountered unknown Exception during checkout of '
'ref=%s for repo_url=%s'), ref, repo_url)
raise exceptions.GitException(location=repo_url, details=e) raise exceptions.GitException(location=repo_url, details=e)
@ -338,8 +332,9 @@ def _create_or_checkout_local_ref(g, branches, ref):
def _create_local_ref(g, branches, ref, newref, reftype=None): def _create_local_ref(g, branches, ref, newref, reftype=None):
if newref not in branches: if newref not in branches:
if newref and ref != newref: if newref and ref != newref:
LOG.debug('Creating local branch for ref=%s (%s for %s)', newref, LOG.debug(
reftype, ref) 'Creating local branch for ref=%s (%s for %s)', newref,
reftype, ref)
g.checkout('FETCH_HEAD', b=newref) g.checkout('FETCH_HEAD', b=newref)
branches.append(newref) branches.append(newref)
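A hypothetical call into the handler above; the URL is a placeholder and the proxy and auth-key arguments are left at their defaults:

from pegleg.engine.util import git

# Clone (or reuse) the repository, check out the ref, and return the local
# directory that later steps read documents from.
repo_dir = git.git_handler(
    'https://opendev.org/airship/pegleg.git', ref='master')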


@ -53,8 +53,8 @@ class PeglegManagedSecretsDocument(object):
if self.is_pegleg_managed_secret(document): if self.is_pegleg_managed_secret(document):
self._pegleg_document = document self._pegleg_document = document
else: else:
self._pegleg_document = self.__wrap(document, generated, catalog, self._pegleg_document = self.__wrap(
author) document, generated, catalog, author)
self._embedded_document = \ self._embedded_document = \
self._pegleg_document['data']['managedDocument'] self._pegleg_document['data']['managedDocument']


@ -30,13 +30,14 @@ LOG = logging.getLogger(__name__)
class PeglegSecretManagement(object): class PeglegSecretManagement(object):
"""An object to handle operations on of a pegleg managed file.""" """An object to handle operations on of a pegleg managed file."""
def __init__(self, def __init__(
file_path=None, self,
docs=None, file_path=None,
generated=False, docs=None,
catalog=None, generated=False,
author=None, catalog=None,
site_name=None): author=None,
site_name=None):
""" """
Read the source file and the environment data needed to wrap and Read the source file and the environment data needed to wrap and
process the file documents as pegleg managed document. process the file documents as pegleg managed document.
@ -56,12 +57,14 @@ class PeglegSecretManagement(object):
config.set_global_enc_keys(site_name) config.set_global_enc_keys(site_name)
if all([file_path, docs]) or not any([file_path, docs]): if all([file_path, docs]) or not any([file_path, docs]):
raise ValueError('Either `file_path` or `docs` must be ' raise ValueError(
'specified.') 'Either `file_path` or `docs` must be '
'specified.')
if generated and not (catalog and author): if generated and not (catalog and author):
raise ValueError("If the document is generated, author and " raise ValueError(
"catalog must be specified.") "If the document is generated, author and "
"catalog must be specified.")
self.file_path = file_path self.file_path = file_path
self.documents = list() self.documents = list()
@ -70,10 +73,11 @@ class PeglegSecretManagement(object):
if docs: if docs:
for doc in docs: for doc in docs:
self.documents.append( self.documents.append(
PeglegManagedSecret(doc, PeglegManagedSecret(
generated=generated, doc,
catalog=catalog, generated=generated,
author=author)) catalog=catalog,
author=author))
else: else:
self.file_path = file_path self.file_path = file_path
for doc in files.read(file_path): for doc in files.read(file_path):
@ -109,9 +113,10 @@ class PeglegSecretManagement(object):
files.write(doc_list, save_path) files.write(doc_list, save_path)
click.echo('Wrote encrypted data to: {}'.format(save_path)) click.echo('Wrote encrypted data to: {}'.format(save_path))
else: else:
LOG.debug('All documents in file: {} are either already encrypted ' LOG.debug(
'or have cleartext storage policy. ' 'All documents in file: {} are either already encrypted '
'Skipping.'.format(self.file_path)) 'or have cleartext storage policy. '
'Skipping.'.format(self.file_path))
def get_encrypted_secrets(self): def get_encrypted_secrets(self):
""" """
@ -121,10 +126,11 @@ class PeglegSecretManagement(object):
:rtype encrypted_docs: bool :rtype encrypted_docs: bool
""" """
if self._generated and not self._author: if self._generated and not self._author:
raise ValueError("An author is needed to encrypt " raise ValueError(
"generated documents. " "An author is needed to encrypt "
"Specify it when PeglegSecretManagement " "generated documents. "
"is initialized.") "Specify it when PeglegSecretManagement "
"is initialized.")
encrypted_docs = False encrypted_docs = False
doc_list = [] doc_list = []
@ -165,10 +171,11 @@ class PeglegSecretManagement(object):
secrets = self.get_decrypted_secrets() secrets = self.get_decrypted_secrets()
return yaml.safe_dump_all(secrets, return yaml.safe_dump_all(
explicit_start=True, secrets,
explicit_end=True, explicit_start=True,
default_flow_style=False) explicit_end=True,
default_flow_style=False)
def get_decrypted_secrets(self): def get_decrypted_secrets(self):
""" """


@ -16,16 +16,15 @@ import json
import logging import logging
import uuid import uuid
from shipyard_client.api_client.shipyard_api_client import ShipyardClient
from shipyard_client.api_client.shipyardclient_context import \
ShipyardClientContext
import yaml import yaml
from pegleg.engine import exceptions from pegleg.engine import exceptions
from pegleg.engine.util import files from pegleg.engine.util import files
from pegleg.engine.util.pegleg_secret_management import PeglegSecretManagement from pegleg.engine.util.pegleg_secret_management import PeglegSecretManagement
from shipyard_client.api_client.shipyard_api_client import ShipyardClient
from shipyard_client.api_client.shipyardclient_context import \
ShipyardClientContext
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
@ -87,8 +86,8 @@ class ShipyardHelper(object):
docs=collected_documents[document]) docs=collected_documents[document])
decrypted_documents = pegleg_secret_mgmt.get_decrypted_secrets() decrypted_documents = pegleg_secret_mgmt.get_decrypted_secrets()
collection_data.extend(decrypted_documents) collection_data.extend(decrypted_documents)
collection_as_yaml = yaml.dump_all(collection_data, collection_as_yaml = yaml.dump_all(
Dumper=yaml.SafeDumper) collection_data, Dumper=yaml.SafeDumper)
# Append flag is not required for the first # Append flag is not required for the first
# collection being uploaded to Shipyard. It # collection being uploaded to Shipyard. It
@ -103,16 +102,14 @@ class ShipyardHelper(object):
resp_text = self.api_client.post_configdocs( resp_text = self.api_client.post_configdocs(
collection_id=self.collection, collection_id=self.collection,
buffer_mode=buffer_mode, buffer_mode=buffer_mode,
document_data=collection_as_yaml document_data=collection_as_yaml)
)
except AuthValuesError as ave: except AuthValuesError as ave:
resp_text = "Error: {}".format(ave.diagnostic) resp_text = "Error: {}".format(ave.diagnostic)
raise DocumentUploadError(resp_text) raise DocumentUploadError(resp_text)
except Exception as ex: except Exception as ex:
resp_text = ( resp_text = (
"Error: Unable to invoke action due to: {}" "Error: Unable to invoke action due to: {}".format(str(ex)))
.format(str(ex)))
LOG.debug(resp_text, exc_info=True) LOG.debug(resp_text, exc_info=True)
raise DocumentUploadError(resp_text) raise DocumentUploadError(resp_text)
@ -143,8 +140,7 @@ class ShipyardHelper(object):
try: try:
resp_text = self.formatted_response_handler( resp_text = self.formatted_response_handler(
self.api_client.commit_configdocs() self.api_client.commit_configdocs())
)
except Exception as ex: except Exception as ex:
resp_text = ( resp_text = (
"Error: Unable to invoke action due to: {}".format(str(ex))) "Error: Unable to invoke action due to: {}".format(str(ex)))
@ -162,10 +158,11 @@ class ShipyardHelper(object):
'--os-{}'.format(var.replace('_', '-'))) '--os-{}'.format(var.replace('_', '-')))
if err_txt: if err_txt:
for var in self.auth_vars: for var in self.auth_vars:
if (self.auth_vars.get(var) is None and if (self.auth_vars.get(var) is None
var not in required_auth_vars): and var not in required_auth_vars):
err_txt.append('- Also not set: --os-{}'.format( err_txt.append(
var.replace('_', '-'))) '- Also not set: --os-{}'.format(
var.replace('_', '-')))
raise AuthValuesError(diagnostic='\n'.join(err_txt)) raise AuthValuesError(diagnostic='\n'.join(err_txt))
def formatted_response_handler(self, response): def formatted_response_handler(self, response):
@ -176,6 +173,5 @@ class ShipyardHelper(object):
return json.dumps(response.json(), indent=4) return json.dumps(response.json(), indent=4)
except ValueError: except ValueError:
return ( return (
"This is not json and could not be printed as such. \n" + "This is not json and could not be printed as such. \n"
response.text + response.text)
)


@ -6,10 +6,11 @@ pytest-xdist==1.23.2
mock==2.0.0 mock==2.0.0
# Formatting # Formatting
yapf==0.20.0 yapf==0.27.0
# Linting # Linting
hacking==1.1.0 hacking==1.1.0
flake8-import-order==0.18.1
# Security # Security
bandit==1.6.0 bandit==1.6.0
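The yapf pin moves from 0.20.0 to 0.27.0; a rough sketch of the kind of check a formatting gate could perform against it, assuming yapf's FormatFile API:

from yapf.yapflib.yapf_api import FormatFile

# Report, without rewriting, whether a file already matches .style.yapf.
_, _encoding, changed = FormatFile(
    'pegleg/engine/util/files.py', style_config='.style.yapf')
if changed:
    raise SystemExit('file is not yapf-formatted')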


@ -46,7 +46,8 @@ _SITE_TEST_STRUCTURE = {
'files': {} 'files': {}
} }
_SITE_DEFINITION = textwrap.dedent(""" _SITE_DEFINITION = textwrap.dedent(
"""
--- ---
schema: pegleg/SiteDefinition/v1 schema: pegleg/SiteDefinition/v1
metadata: metadata:
@ -67,7 +68,8 @@ _CA_KEY_NAME = "kubernetes"
_CERT_KEY_NAME = "kubelet-n3" _CERT_KEY_NAME = "kubelet-n3"
_KEYPAIR_KEY_NAME = "service-account" _KEYPAIR_KEY_NAME = "service-account"
_PKI_CATALOG_CAS = textwrap.dedent(""" _PKI_CATALOG_CAS = textwrap.dedent(
"""
--- ---
schema: pegleg/PKICatalog/v1 schema: pegleg/PKICatalog/v1
metadata: metadata:
@ -84,7 +86,8 @@ _PKI_CATALOG_CAS = textwrap.dedent("""
... ...
""" % _CA_KEY_NAME) """ % _CA_KEY_NAME)
_PKI_CATALOG_CERTS = textwrap.dedent(""" _PKI_CATALOG_CERTS = textwrap.dedent(
"""
--- ---
schema: pegleg/PKICatalog/v1 schema: pegleg/PKICatalog/v1
metadata: metadata:
@ -109,7 +112,8 @@ _PKI_CATALOG_CERTS = textwrap.dedent("""
... ...
""" % (_CA_KEY_NAME, _CERT_KEY_NAME)) """ % (_CA_KEY_NAME, _CERT_KEY_NAME))
_PKI_CATALOG_KEYPAIRS = textwrap.dedent(""" _PKI_CATALOG_KEYPAIRS = textwrap.dedent(
"""
--- ---
schema: pegleg/PKICatalog/v1 schema: pegleg/PKICatalog/v1
metadata: metadata:
@ -128,7 +132,8 @@ _PKI_CATALOG_KEYPAIRS = textwrap.dedent("""
... ...
""" % _KEYPAIR_KEY_NAME) """ % _KEYPAIR_KEY_NAME)
_PKI_CATALOG_EVERYTHING = textwrap.dedent(""" _PKI_CATALOG_EVERYTHING = textwrap.dedent(
"""
--- ---
schema: pegleg/PKICatalog/v1 schema: pegleg/PKICatalog/v1
metadata: metadata:
@ -273,8 +278,9 @@ class TestPKIGenerator(object):
] ]
def _filter_keypairs(x): def _filter_keypairs(x):
return (x['data']['managedDocument']['schema'] in return (
valid_keypair_schemas) x['data']['managedDocument']['schema'] in valid_keypair_schemas
)
keypairs = list(filter(_filter_keypairs, documents)) keypairs = list(filter(_filter_keypairs, documents))
self._validate_documents( self._validate_documents(


@ -127,8 +127,8 @@ class TestPKIUtility(object):
ca_config['signing']['default']['expiry'] = '1h' ca_config['signing']['default']['expiry'] = '1h'
m_callable = mock.PropertyMock(return_value=json.dumps(ca_config)) m_callable = mock.PropertyMock(return_value=json.dumps(ca_config))
with mock.patch.object( with mock.patch.object(pki_utility.PKIUtility, 'ca_config',
pki_utility.PKIUtility, 'ca_config', new_callable=m_callable): new_callable=m_callable):
ca_cert_wrapper, ca_key_wrapper = pki_obj.generate_ca( ca_cert_wrapper, ca_key_wrapper = pki_obj.generate_ca(
self.__class__.__name__) self.__class__.__name__)
ca_cert = ca_cert_wrapper['data']['managedDocument'] ca_cert = ca_cert_wrapper['data']['managedDocument']
@ -155,8 +155,8 @@ class TestPKIUtility(object):
ca_config['signing']['default']['expiry'] = '1s' ca_config['signing']['default']['expiry'] = '1s'
m_callable = mock.PropertyMock(return_value=json.dumps(ca_config)) m_callable = mock.PropertyMock(return_value=json.dumps(ca_config))
with mock.patch.object( with mock.patch.object(pki_utility.PKIUtility, 'ca_config',
pki_utility.PKIUtility, 'ca_config', new_callable=m_callable): new_callable=m_callable):
ca_cert_wrapper, ca_key_wrapper = pki_obj.generate_ca( ca_cert_wrapper, ca_key_wrapper = pki_obj.generate_ca(
self.__class__.__name__) self.__class__.__name__)
ca_cert = ca_cert_wrapper['data']['managedDocument'] ca_cert = ca_cert_wrapper['data']['managedDocument']


@ -104,16 +104,18 @@ def test_no_encryption_key(temp_path):
os.makedirs(config_dir) os.makedirs(config_dir)
files.write(config_data, config_path) files.write(config_data, config_path)
files.write(yaml.safe_load_all(SITE_DEFINITION), files.write(
os.path.join(config_dir, "site-definition.yaml")) yaml.safe_load_all(SITE_DEFINITION),
os.path.join(config_dir, "site-definition.yaml"))
with pytest.raises(GenesisBundleEncryptionException, with pytest.raises(GenesisBundleEncryptionException,
match=r'.*no encryption policy or key is specified.*'): match=r'.*no encryption policy or key is specified.*'):
bundle.build_genesis(build_path=build_dir, bundle.build_genesis(
encryption_key=None, build_path=build_dir,
validators=False, encryption_key=None,
debug=logging.ERROR, validators=False,
site_name="test_site") debug=logging.ERROR,
site_name="test_site")
@mock.patch.dict( @mock.patch.dict(
@ -132,13 +134,15 @@ def test_failed_deckhand_validation(temp_path):
build_dir = os.path.join(temp_path, 'build_dir') build_dir = os.path.join(temp_path, 'build_dir')
os.makedirs(config_dir) os.makedirs(config_dir)
files.write(config_data, config_path) files.write(config_data, config_path)
files.write(yaml.safe_load_all(SITE_DEFINITION), files.write(
os.path.join(config_dir, "site-definition.yaml")) yaml.safe_load_all(SITE_DEFINITION),
os.path.join(config_dir, "site-definition.yaml"))
key = 'MyverYSecretEncryptionKey382803' key = 'MyverYSecretEncryptionKey382803'
with pytest.raises(GenesisBundleGenerateException, with pytest.raises(GenesisBundleGenerateException,
match=r'.*failed on deckhand validation.*'): match=r'.*failed on deckhand validation.*'):
bundle.build_genesis(build_path=build_dir, bundle.build_genesis(
encryption_key=key, build_path=build_dir,
validators=False, encryption_key=key,
debug=logging.ERROR, validators=False,
site_name="test_site") debug=logging.ERROR,
site_name="test_site")


@ -29,7 +29,8 @@ from pegleg.engine.util import encryption
from pegleg.engine import util from pegleg.engine import util
import pegleg import pegleg
TEST_PASSPHRASES_CATALOG = yaml.safe_load(""" TEST_PASSPHRASES_CATALOG = yaml.safe_load(
"""
--- ---
schema: pegleg/PassphraseCatalog/v1 schema: pegleg/PassphraseCatalog/v1
metadata: metadata:
@ -67,7 +68,8 @@ data:
... ...
""") """)
TEST_GLOBAL_PASSPHRASES_CATALOG = yaml.safe_load(""" TEST_GLOBAL_PASSPHRASES_CATALOG = yaml.safe_load(
"""
--- ---
schema: pegleg/PassphraseCatalog/v1 schema: pegleg/PassphraseCatalog/v1
metadata: metadata:
@ -85,7 +87,8 @@ data:
... ...
""") """)
TEST_BASE64_PASSPHRASES_CATALOG = yaml.safe_load(""" TEST_BASE64_PASSPHRASES_CATALOG = yaml.safe_load(
"""
--- ---
schema: pegleg/PassphraseCatalog/v1 schema: pegleg/PassphraseCatalog/v1
metadata: metadata:
@ -119,8 +122,9 @@ TEST_REPOSITORIES = {
}, },
'secrets': { 'secrets': {
'revision': 'master', 'revision': 'master',
'url': ('ssh://REPO_USERNAME@gerrit:29418/aic-clcp-security-' 'url': (
'manifests.git') 'ssh://REPO_USERNAME@gerrit:29418/aic-clcp-security-'
'manifests.git')
} }
} }
} }
@ -143,8 +147,12 @@ TEST_SITE_DEFINITION = {
} }
TEST_SITE_DOCUMENTS = [TEST_SITE_DEFINITION, TEST_PASSPHRASES_CATALOG] TEST_SITE_DOCUMENTS = [TEST_SITE_DEFINITION, TEST_PASSPHRASES_CATALOG]
TEST_GLOBAL_SITE_DOCUMENTS = [TEST_SITE_DEFINITION, TEST_GLOBAL_PASSPHRASES_CATALOG] TEST_GLOBAL_SITE_DOCUMENTS = [
TEST_BASE64_SITE_DOCUMENTS = [TEST_SITE_DEFINITION, TEST_BASE64_PASSPHRASES_CATALOG] TEST_SITE_DEFINITION, TEST_GLOBAL_PASSPHRASES_CATALOG
]
TEST_BASE64_SITE_DOCUMENTS = [
TEST_SITE_DEFINITION, TEST_BASE64_PASSPHRASES_CATALOG
]
@mock.patch.object( @mock.patch.object(
@ -162,10 +170,13 @@ TEST_BASE64_SITE_DOCUMENTS = [TEST_SITE_DEFINITION, TEST_BASE64_PASSPHRASES_CATA
'site_files', 'site_files',
autospec=True, autospec=True,
return_value=[ return_value=[
'cicd_site_repo/site/cicd/passphrases/passphrase-catalog.yaml', ]) 'cicd_site_repo/site/cicd/passphrases/passphrase-catalog.yaml',
@mock.patch.dict(os.environ, { ])
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC', @mock.patch.dict(
'PEGLEG_SALT': 'MySecretSalt1234567890]['}) os.environ, {
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
'PEGLEG_SALT': 'MySecretSalt1234567890]['
})
def test_generate_passphrases(*_): def test_generate_passphrases(*_):
_dir = tempfile.mkdtemp() _dir = tempfile.mkdtemp()
os.makedirs(os.path.join(_dir, 'cicd_site_repo'), exist_ok=True) os.makedirs(os.path.join(_dir, 'cicd_site_repo'), exist_ok=True)
@ -173,9 +184,9 @@ def test_generate_passphrases(*_):
for passphrase in TEST_PASSPHRASES_CATALOG['data']['passphrases']: for passphrase in TEST_PASSPHRASES_CATALOG['data']['passphrases']:
passphrase_file_name = '{}.yaml'.format(passphrase['document_name']) passphrase_file_name = '{}.yaml'.format(passphrase['document_name'])
passphrase_file_path = os.path.join(_dir, 'site', 'cicd', 'secrets', passphrase_file_path = os.path.join(
'passphrases', _dir, 'site', 'cicd', 'secrets', 'passphrases',
passphrase_file_name) passphrase_file_name)
assert os.path.isfile(passphrase_file_path) assert os.path.isfile(passphrase_file_path)
with open(passphrase_file_path) as stream: with open(passphrase_file_path) as stream:
doc = yaml.safe_load(stream) doc = yaml.safe_load(stream)
@ -187,7 +198,7 @@ def test_generate_passphrases(*_):
assert doc['data']['generated']['by'] == 'test_author' assert doc['data']['generated']['by'] == 'test_author'
assert 'managedDocument' in doc['data'] assert 'managedDocument' in doc['data']
assert doc['data']['managedDocument']['metadata'][ assert doc['data']['managedDocument']['metadata'][
'storagePolicy'] == 'encrypted' 'storagePolicy'] == 'encrypted'
decrypted_passphrase = encryption.decrypt( decrypted_passphrase = encryption.decrypt(
doc['data']['managedDocument']['data'], doc['data']['managedDocument']['data'],
os.environ['PEGLEG_PASSPHRASE'].encode(), os.environ['PEGLEG_PASSPHRASE'].encode(),
@ -214,10 +225,12 @@ def test_generate_passphrases_exception(capture):
# Decrypt using the wrong key to see to see the InvalidToken error # Decrypt using the wrong key to see to see the InvalidToken error
with pytest.raises(fernet.InvalidToken): with pytest.raises(fernet.InvalidToken):
encryption.decrypt(enc_data, passphrase2, salt2) encryption.decrypt(enc_data, passphrase2, salt2)
capture.check(('pegleg.engine.util.encryption', 'ERROR', capture.check(
('Signature verification to decrypt secrets failed. ' (
'Please check your provided passphrase and salt and ' 'pegleg.engine.util.encryption', 'ERROR', (
'try again.'))) 'Signature verification to decrypt secrets failed. '
'Please check your provided passphrase and salt and '
'try again.')))
@mock.patch.object( @mock.patch.object(
@ -235,10 +248,13 @@ def test_generate_passphrases_exception(capture):
'site_files', 'site_files',
autospec=True, autospec=True,
return_value=[ return_value=[
'cicd_global_repo/site/cicd/passphrases/passphrase-catalog.yaml', ]) 'cicd_global_repo/site/cicd/passphrases/passphrase-catalog.yaml',
@mock.patch.dict(os.environ, { ])
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC', @mock.patch.dict(
'PEGLEG_SALT': 'MySecretSalt1234567890]['}) os.environ, {
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
'PEGLEG_SALT': 'MySecretSalt1234567890]['
})
def test_global_passphrase_catalog(*_): def test_global_passphrase_catalog(*_):
_dir = tempfile.mkdtemp() _dir = tempfile.mkdtemp()
os.makedirs(os.path.join(_dir, 'cicd_site_repo'), exist_ok=True) os.makedirs(os.path.join(_dir, 'cicd_site_repo'), exist_ok=True)
@ -246,9 +262,9 @@ def test_global_passphrase_catalog(*_):
for passphrase in TEST_GLOBAL_PASSPHRASES_CATALOG['data']['passphrases']: for passphrase in TEST_GLOBAL_PASSPHRASES_CATALOG['data']['passphrases']:
passphrase_file_name = '{}.yaml'.format(passphrase['document_name']) passphrase_file_name = '{}.yaml'.format(passphrase['document_name'])
passphrase_file_path = os.path.join(_dir, 'site', 'cicd', 'secrets', passphrase_file_path = os.path.join(
'passphrases', _dir, 'site', 'cicd', 'secrets', 'passphrases',
passphrase_file_name) passphrase_file_name)
assert os.path.isfile(passphrase_file_path) assert os.path.isfile(passphrase_file_path)
with open(passphrase_file_path) as stream: with open(passphrase_file_path) as stream:
doc = yaml.safe_load(stream) doc = yaml.safe_load(stream)
@ -260,7 +276,7 @@ def test_global_passphrase_catalog(*_):
assert doc['data']['generated']['by'] == 'test_author' assert doc['data']['generated']['by'] == 'test_author'
assert 'managedDocument' in doc['data'] assert 'managedDocument' in doc['data']
assert doc['data']['managedDocument']['metadata'][ assert doc['data']['managedDocument']['metadata'][
'storagePolicy'] == 'encrypted' 'storagePolicy'] == 'encrypted'
decrypted_passphrase = encryption.decrypt( decrypted_passphrase = encryption.decrypt(
doc['data']['managedDocument']['data'], doc['data']['managedDocument']['data'],
os.environ['PEGLEG_PASSPHRASE'].encode(), os.environ['PEGLEG_PASSPHRASE'].encode(),
@ -284,10 +300,13 @@ def test_global_passphrase_catalog(*_):
'site_files', 'site_files',
autospec=True, autospec=True,
return_value=[ return_value=[
'cicd_global_repo/site/cicd/passphrases/passphrase-catalog.yaml', ]) 'cicd_global_repo/site/cicd/passphrases/passphrase-catalog.yaml',
@mock.patch.dict(os.environ, { ])
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC', @mock.patch.dict(
'PEGLEG_SALT': 'MySecretSalt1234567890]['}) os.environ, {
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
'PEGLEG_SALT': 'MySecretSalt1234567890]['
})
def test_base64_passphrase_catalog(*_): def test_base64_passphrase_catalog(*_):
_dir = tempfile.mkdtemp() _dir = tempfile.mkdtemp()
os.makedirs(os.path.join(_dir, 'cicd_site_repo'), exist_ok=True) os.makedirs(os.path.join(_dir, 'cicd_site_repo'), exist_ok=True)
@ -295,9 +314,9 @@ def test_base64_passphrase_catalog(*_):
for passphrase in TEST_BASE64_PASSPHRASES_CATALOG['data']['passphrases']: for passphrase in TEST_BASE64_PASSPHRASES_CATALOG['data']['passphrases']:
passphrase_file_name = '{}.yaml'.format(passphrase['document_name']) passphrase_file_name = '{}.yaml'.format(passphrase['document_name'])
passphrase_file_path = os.path.join(_dir, 'site', 'cicd', 'secrets', passphrase_file_path = os.path.join(
'passphrases', _dir, 'site', 'cicd', 'secrets', 'passphrases',
passphrase_file_name) passphrase_file_name)
assert os.path.isfile(passphrase_file_path) assert os.path.isfile(passphrase_file_path)
with open(passphrase_file_path) as stream: with open(passphrase_file_path) as stream:
doc = yaml.safe_load(stream) doc = yaml.safe_load(stream)
@ -310,23 +329,23 @@ def test_base64_passphrase_catalog(*_):
base64.b64decode(decrypted_passphrase)) base64.b64decode(decrypted_passphrase))
@mock.patch.dict(os.environ, { @mock.patch.dict(
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC', os.environ, {
'PEGLEG_SALT': 'MySecretSalt1234567890]['}) 'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
'PEGLEG_SALT': 'MySecretSalt1234567890]['
})
def test_crypt_coding_flow(): def test_crypt_coding_flow():
cs_util = CryptoString() cs_util = CryptoString()
orig_passphrase = cs_util.get_crypto_string() orig_passphrase = cs_util.get_crypto_string()
bytes_passphrase = orig_passphrase.encode() bytes_passphrase = orig_passphrase.encode()
b64_passphrase = base64.b64encode(bytes_passphrase) b64_passphrase = base64.b64encode(bytes_passphrase)
encrypted = encryption.encrypt(b64_passphrase, encrypted = encryption.encrypt(
os.environ['PEGLEG_PASSPHRASE'].encode(), b64_passphrase, os.environ['PEGLEG_PASSPHRASE'].encode(),
os.environ['PEGLEG_SALT'].encode() os.environ['PEGLEG_SALT'].encode())
) decrypted = encryption.decrypt(
decrypted = encryption.decrypt(encrypted, encrypted, os.environ['PEGLEG_PASSPHRASE'].encode(),
os.environ['PEGLEG_PASSPHRASE'].encode(), os.environ['PEGLEG_SALT'].encode())
os.environ['PEGLEG_SALT'].encode()
)
assert encrypted != decrypted assert encrypted != decrypted
assert decrypted == b64_passphrase assert decrypted == b64_passphrase
assert base64.b64decode(decrypted) == bytes_passphrase assert base64.b64decode(decrypted) == bytes_passphrase
assert bytes_passphrase.decode() == orig_passphrase assert bytes_passphrase.decode() == orig_passphrase


@ -37,9 +37,8 @@ def test_verify_deckhand_render_site_documents_separately(
], ],
} }
with mock.patch( with mock.patch('pegleg.engine.util.deckhand.deckhand_render',
'pegleg.engine.util.deckhand.deckhand_render', autospec=True) as mock_render:
autospec=True) as mock_render:
mock_render.return_value = (None, []) mock_render.return_value = (None, [])
result = lint._verify_deckhand_render() result = lint._verify_deckhand_render()
@ -55,79 +54,81 @@ def test_verify_deckhand_render_site_documents_separately(
expected_documents = [] expected_documents = []
for sitename in expected_sitenames: for sitename in expected_sitenames:
documents = [{ documents = [
'data': 'global-common-password', {
'metadata': { 'data': 'global-common-password',
'layeringDefinition': { 'metadata': {
'abstract': False, 'layeringDefinition': {
'layer': 'global' 'abstract': False,
'layer': 'global'
},
'name': 'global-common',
'schema': 'metadata/Document/v1',
'storagePolicy': 'cleartext'
}, },
'name': 'global-common', 'schema': 'deckhand/Passphrase/v1'
'schema': 'metadata/Document/v1', }, {
'storagePolicy': 'cleartext' 'data': 'global-v1.0-password',
}, 'metadata': {
'schema': 'deckhand/Passphrase/v1' 'layeringDefinition': {
}, { 'abstract': False,
'data': 'global-v1.0-password', 'layer': 'global'
'metadata': { },
'layeringDefinition': { 'name': 'global-v1.0',
'abstract': False, 'schema': 'metadata/Document/v1',
'layer': 'global' 'storagePolicy': 'cleartext'
}, },
'name': 'global-v1.0', 'schema': 'deckhand/Passphrase/v1'
'schema': 'metadata/Document/v1', }, {
'storagePolicy': 'cleartext' 'data': '%s-type-common-password' % sitename,
}, 'metadata': {
'schema': 'deckhand/Passphrase/v1' 'layeringDefinition': {
}, { 'abstract': False,
'data': '%s-type-common-password' % sitename, 'layer': 'type'
'metadata': { },
'layeringDefinition': { 'name': '%s-type-common' % sitename,
'abstract': False, 'schema': 'metadata/Document/v1',
'layer': 'type' 'storagePolicy': 'cleartext'
}, },
'name': '%s-type-common' % sitename, 'schema': 'deckhand/Passphrase/v1'
'schema': 'metadata/Document/v1', }, {
'storagePolicy': 'cleartext' 'data': '%s-type-v1.0-password' % sitename,
}, 'metadata': {
'schema': 'deckhand/Passphrase/v1' 'layeringDefinition': {
}, { 'abstract': False,
'data': '%s-type-v1.0-password' % sitename, 'layer': 'type'
'metadata': { },
'layeringDefinition': { 'name': '%s-type-v1.0' % sitename,
'abstract': False, 'schema': 'metadata/Document/v1',
'layer': 'type' 'storagePolicy': 'cleartext'
}, },
'name': '%s-type-v1.0' % sitename, 'schema': 'deckhand/Passphrase/v1'
'schema': 'metadata/Document/v1', }, {
'storagePolicy': 'cleartext' 'data': '%s-chart-password' % sitename,
}, 'metadata': {
'schema': 'deckhand/Passphrase/v1' 'layeringDefinition': {
}, { 'abstract': False,
'data': '%s-chart-password' % sitename, 'layer': 'site'
'metadata': { },
'layeringDefinition': { 'name': '%s-chart' % sitename,
'abstract': False, 'schema': 'metadata/Document/v1',
'layer': 'site' 'storagePolicy': 'cleartext'
}, },
'name': '%s-chart' % sitename, 'schema': 'deckhand/Passphrase/v1'
'schema': 'metadata/Document/v1', }, {
'storagePolicy': 'cleartext' 'data': '%s-passphrase-password' % sitename,
}, 'metadata': {
'schema': 'deckhand/Passphrase/v1' 'layeringDefinition': {
}, { 'abstract': False,
'data': '%s-passphrase-password' % sitename, 'layer': 'site'
'metadata': { },
'layeringDefinition': { 'name': '%s-passphrase' % sitename,
'abstract': False, 'schema': 'metadata/Document/v1',
'layer': 'site' 'storagePolicy': 'cleartext'
}, },
'name': '%s-passphrase' % sitename, 'schema': 'deckhand/Passphrase/v1'
'schema': 'metadata/Document/v1', }
'storagePolicy': 'cleartext' ]
},
'schema': 'deckhand/Passphrase/v1'
}]
expected_documents.extend(documents) expected_documents.extend(documents)
mock_calls = list(mock_render.mock_calls) mock_calls = list(mock_render.mock_calls)
@ -151,13 +152,10 @@ def test_verify_deckhand_render_error_handling(mock_render):
mock_render: Mock render object. mock_render: Mock render object.
""" """
exp_dict = { exp_dict = {
'exp1': 'exp1': DECKHAND_DUPLICATE_SCHEMA + ": Duplicate schema specified.\n",
DECKHAND_DUPLICATE_SCHEMA + ": Duplicate schema specified.\n", 'exp2': DECKHAND_RENDER_EXCEPTION +
'exp2':
DECKHAND_RENDER_EXCEPTION +
": An unknown Deckhand exception occurred while trying to render documents\n", ": An unknown Deckhand exception occurred while trying to render documents\n",
'exp3': 'exp3': "Generic Error\n"
"Generic Error\n"
} }
# No exception raised # No exception raised
mock_render.return_value = _return_deckhand_render_errors() mock_render.return_value = _return_deckhand_render_errors()
@ -231,12 +229,14 @@ def _return_deckhand_render_errors(error_count=0):
""" """
errors = [] errors = []
if error_count >= 1: if error_count >= 1:
errors.append((DECKHAND_DUPLICATE_SCHEMA, errors.append(
'Duplicate schema specified.')) (DECKHAND_DUPLICATE_SCHEMA, 'Duplicate schema specified.'))
if error_count >= 2: if error_count >= 2:
errors.append((DECKHAND_RENDER_EXCEPTION, errors.append(
'An unknown Deckhand exception occurred while ' (
'trying to render documents')) DECKHAND_RENDER_EXCEPTION,
'An unknown Deckhand exception occurred while '
'trying to render documents'))
if error_count >= 3: if error_count >= 3:
errors.append(('Generic Error')) errors.append(('Generic Error'))
return errors return errors


@ -106,8 +106,9 @@ data: h3=DQ#GNYEuCvybgpfW7ZxAP
def test_encrypt_and_decrypt(): def test_encrypt_and_decrypt():
data = test_utils.rand_name("this is an example of un-encrypted " data = test_utils.rand_name(
"data.", "pegleg").encode() "this is an example of un-encrypted "
"data.", "pegleg").encode()
passphrase = test_utils.rand_name("passphrase1", "pegleg").encode() passphrase = test_utils.rand_name("passphrase1", "pegleg").encode()
salt = test_utils.rand_name("salt1", "pegleg").encode() salt = test_utils.rand_name("salt1", "pegleg").encode()
enc1 = crypt.encrypt(data, passphrase, salt) enc1 = crypt.encrypt(data, passphrase, salt)
@ -177,8 +178,9 @@ data: {0}-password
assert len(encrypted_files) > 0 assert len(encrypted_files) > 0
encrypted_path = str( encrypted_path = str(
save_location.join("site/cicd/secrets/passphrases/" save_location.join(
"cicd-passphrase-encrypted.yaml")) "site/cicd/secrets/passphrases/"
"cicd-passphrase-encrypted.yaml"))
decrypted = secrets.decrypt(encrypted_path) decrypted = secrets.decrypt(encrypted_path)
assert yaml.safe_load( assert yaml.safe_load(
decrypted[encrypted_path]) == yaml.safe_load(passphrase_doc) decrypted[encrypted_path]) == yaml.safe_load(passphrase_doc)
@ -211,9 +213,8 @@ def test_pegleg_secret_management_constructor_with_invalid_arguments():
assert 'Either `file_path` or `docs` must be specified.' in str( assert 'Either `file_path` or `docs` must be specified.' in str(
err_info.value) err_info.value)
with pytest.raises(ValueError) as err_info: with pytest.raises(ValueError) as err_info:
PeglegSecretManagement(file_path='file_path', PeglegSecretManagement(
generated=True, file_path='file_path', generated=True, author='test_author')
author='test_author')
assert 'If the document is generated, author and catalog must be ' \ assert 'If the document is generated, author and catalog must be ' \
'specified.' in str(err_info.value) 'specified.' in str(err_info.value)
with pytest.raises(ValueError) as err_info: with pytest.raises(ValueError) as err_info:
@ -221,9 +222,8 @@ def test_pegleg_secret_management_constructor_with_invalid_arguments():
assert 'If the document is generated, author and catalog must be ' \ assert 'If the document is generated, author and catalog must be ' \
'specified.' in str(err_info.value) 'specified.' in str(err_info.value)
with pytest.raises(ValueError) as err_info: with pytest.raises(ValueError) as err_info:
PeglegSecretManagement(docs=['doc'], PeglegSecretManagement(
generated=True, docs=['doc'], generated=True, author='test_author')
author='test_author')
assert 'If the document is generated, author and catalog must be ' \ assert 'If the document is generated, author and catalog must be ' \
'specified.' in str(err_info.value) 'specified.' in str(err_info.value)
with pytest.raises(ValueError) as err_info: with pytest.raises(ValueError) as err_info:
@ -306,8 +306,9 @@ def test_encrypt_decrypt_using_docs(temp_path):
'metadata']['storagePolicy'] 'metadata']['storagePolicy']
@pytest.mark.skipif(not pki_utility.PKIUtility.cfssl_exists(), @pytest.mark.skipif(
reason='cfssl must be installed to execute these tests') not pki_utility.PKIUtility.cfssl_exists(),
reason='cfssl must be installed to execute these tests')
@mock.patch.dict( @mock.patch.dict(
os.environ, { os.environ, {
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC', 'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
@ -322,8 +323,8 @@ def test_generate_pki_using_local_repo_path(create_tmp_deployment_files):
repo_path = str( repo_path = str(
git.git_handler(TEST_PARAMS["repo_url"], ref=TEST_PARAMS["repo_rev"])) git.git_handler(TEST_PARAMS["repo_url"], ref=TEST_PARAMS["repo_rev"]))
with mock.patch.dict(config.GLOBAL_CONTEXT, {"site_repo": repo_path}): with mock.patch.dict(config.GLOBAL_CONTEXT, {"site_repo": repo_path}):
pki_generator = PKIGenerator(duration=365, pki_generator = PKIGenerator(
sitename=TEST_PARAMS["site_name"]) duration=365, sitename=TEST_PARAMS["site_name"])
generated_files = pki_generator.generate() generated_files = pki_generator.generate()
assert len(generated_files), 'No secrets were generated' assert len(generated_files), 'No secrets were generated'
@ -333,8 +334,9 @@ def test_generate_pki_using_local_repo_path(create_tmp_deployment_files):
assert list(result), "%s file is empty" % generated_file.name assert list(result), "%s file is empty" % generated_file.name
@pytest.mark.skipif(not pki_utility.PKIUtility.cfssl_exists(), @pytest.mark.skipif(
reason='cfssl must be installed to execute these tests') not pki_utility.PKIUtility.cfssl_exists(),
reason='cfssl must be installed to execute these tests')
@mock.patch.dict( @mock.patch.dict(
os.environ, { os.environ, {
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC', 'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
@ -345,8 +347,8 @@ def test_check_expiry(create_tmp_deployment_files):
repo_path = str( repo_path = str(
git.git_handler(TEST_PARAMS["repo_url"], ref=TEST_PARAMS["repo_rev"])) git.git_handler(TEST_PARAMS["repo_url"], ref=TEST_PARAMS["repo_rev"]))
with mock.patch.dict(config.GLOBAL_CONTEXT, {"site_repo": repo_path}): with mock.patch.dict(config.GLOBAL_CONTEXT, {"site_repo": repo_path}):
pki_generator = PKIGenerator(duration=365, pki_generator = PKIGenerator(
sitename=TEST_PARAMS["site_name"]) duration=365, sitename=TEST_PARAMS["site_name"])
generated_files = pki_generator.generate() generated_files = pki_generator.generate()
pki_util = pki_utility.PKIUtility(duration=0) pki_util = pki_utility.PKIUtility(duration=0)
@ -398,10 +400,9 @@ def test_get_global_creds_missing_pass(create_tmp_deployment_files, tmpdir):
     site_dir = tmpdir.join("deployment_files", "site", "cicd")
     # Create global salt file
-    with open(
-            os.path.join(str(site_dir), 'secrets', 'passphrases',
-                         'cicd-global-passphrase-encrypted.yaml'),
-            "w") as outfile:
+    with open(os.path.join(str(site_dir), 'secrets', 'passphrases',
+                           'cicd-global-passphrase-encrypted.yaml'),
+              "w") as outfile:
         outfile.write(GLOBAL_SALT_DOC)
     save_location = tmpdir.mkdir("encrypted_site_files")
@ -466,23 +467,21 @@ def test_global_encrypt_decrypt(create_tmp_deployment_files, tmpdir):
secrets.encrypt(save_location_str, "pytest", "cicd") secrets.encrypt(save_location_str, "pytest", "cicd")
# Create and encrypt a global type document # Create and encrypt a global type document
global_doc_path = os.path.join(str(site_dir), 'secrets', 'passphrases', global_doc_path = os.path.join(
'globally_encrypted_doc.yaml') str(site_dir), 'secrets', 'passphrases', 'globally_encrypted_doc.yaml')
with open(global_doc_path, "w") as outfile: with open(global_doc_path, "w") as outfile:
outfile.write(TEST_GLOBAL_DATA) outfile.write(TEST_GLOBAL_DATA)
# encrypt documents and validate that they were encrypted # encrypt documents and validate that they were encrypted
doc_mgr = PeglegSecretManagement(file_path=global_doc_path, doc_mgr = PeglegSecretManagement(
author='pytest', file_path=global_doc_path, author='pytest', site_name='cicd')
site_name='cicd')
doc_mgr.encrypt_secrets(global_doc_path) doc_mgr.encrypt_secrets(global_doc_path)
doc = doc_mgr.documents[0] doc = doc_mgr.documents[0]
assert doc.is_encrypted() assert doc.is_encrypted()
assert doc.data['encrypted']['by'] == 'pytest' assert doc.data['encrypted']['by'] == 'pytest'
doc_mgr = PeglegSecretManagement(file_path=global_doc_path, doc_mgr = PeglegSecretManagement(
author='pytest', file_path=global_doc_path, author='pytest', site_name='cicd')
site_name='cicd')
decrypted_data = doc_mgr.get_decrypted_secrets() decrypted_data = doc_mgr.get_decrypted_secrets()
test_data = list(yaml.safe_load_all(TEST_GLOBAL_DATA)) test_data = list(yaml.safe_load_all(TEST_GLOBAL_DATA))
assert test_data[0]['data'] == decrypted_data[0]['data'] assert test_data[0]['data'] == decrypted_data[0]['data']

View File

@ -60,9 +60,8 @@ class TestSelectableLinting(object):
msg_2 = 'test msg' msg_2 = 'test msg'
msgs = [(code_1, msg_1), (code_2, msg_2)] msgs = [(code_1, msg_1), (code_2, msg_2)]
with mock.patch.object( with mock.patch.object(lint, '_verify_file_contents',
lint, '_verify_file_contents', return_value=msgs) as mock_methed:
return_value=msgs) as mock_methed:
with pytest.raises(click.ClickException) as expected_exc: with pytest.raises(click.ClickException) as expected_exc:
lint.full(False, exclude_lint, []) lint.full(False, exclude_lint, [])
assert msg_1 in expected_exc assert msg_1 in expected_exc
@ -75,10 +74,9 @@ class TestSelectableLinting(object):
directories. directories.
""" """
exclude_lint = ['P003'] exclude_lint = ['P003']
with mock.patch.object( with mock.patch.object(lint, '_verify_no_unexpected_files',
lint, return_value=[('P003', 'test message')
'_verify_no_unexpected_files', ]) as mock_method:
return_value=[('P003', 'test message')]) as mock_method:
result = lint.full(False, exclude_lint, []) result = lint.full(False, exclude_lint, [])
mock_method.assert_called() mock_method.assert_called()
assert not result # Exclude doesn't return anything. assert not result # Exclude doesn't return anything.
@ -99,9 +97,8 @@ class TestSelectableLinting(object):
msg_2 = 'test msg' msg_2 = 'test msg'
msgs = [(code_1, msg_1), (code_2, msg_2)] msgs = [(code_1, msg_1), (code_2, msg_2)]
with mock.patch.object( with mock.patch.object(lint, '_verify_file_contents',
lint, '_verify_file_contents', return_value=msgs) as mock_methed:
return_value=msgs) as mock_methed:
with pytest.raises(click.ClickException) as expected_exc: with pytest.raises(click.ClickException) as expected_exc:
lint.full( lint.full(
False, exclude_lint=exclude_lint, warn_lint=warn_lint) False, exclude_lint=exclude_lint, warn_lint=warn_lint)
@ -137,20 +134,21 @@ class TestSelectableLinting(object):
         config.set_site_repo(self.site_yaml_path)
         documents = {
-            mock.sentinel.site: [{
-                # Create 2 duplicate DataSchema documents.
-                "schema": "deckhand/DataSchema/v1",
-                "metadata": {
-                    "name": mock.sentinel.document_name
-                },
-                "data": {}
-            }] * 2
+            mock.sentinel.site: [
+                {
+                    # Create 2 duplicate DataSchema documents.
+                    "schema": "deckhand/DataSchema/v1",
+                    "metadata": {
+                        "name": mock.sentinel.document_name
+                    },
+                    "data": {}
+                }
+            ] * 2
         }
         with mock.patch(
                 'pegleg.engine.util.definition.documents_for_each_site',
-                autospec=True,
-                return_value=documents):
+                autospec=True, return_value=documents):
             result = lint.full(
                 False, exclude_lint=exclude_lint, warn_lint=warn_lint)
         assert len(result) == 1
@ -168,19 +166,20 @@ class TestSelectableLinting(object):
         config.set_site_repo(self.site_yaml_path)
         documents = {
-            mock.sentinel.site: [{
-                "schema": "deckhand/DataSchema/v1",
-                "metadata": {
-                    "name": mock.sentinel.document_name
-                },
-                "data": {}
-            }]
+            mock.sentinel.site: [
+                {
+                    "schema": "deckhand/DataSchema/v1",
+                    "metadata": {
+                        "name": mock.sentinel.document_name
+                    },
+                    "data": {}
+                }
+            ]
         }
         with mock.patch(
                 'pegleg.engine.util.definition.documents_for_each_site',
-                autospec=True,
-                return_value=documents):
+                autospec=True, return_value=documents):
             result = lint.full(
                 False, exclude_lint=exclude_lint, warn_lint=warn_lint)
         assert len(result) == 1
@ -196,10 +195,8 @@ class TestSelectableLinting(object):
p = tmpdir.mkdir(self.__class__.__name__).join("test.yaml") p = tmpdir.mkdir(self.__class__.__name__).join("test.yaml")
p.write("foo: bar") p.write("foo: bar")
with mock.patch( with mock.patch('pegleg.engine.util.files.all', autospec=True,
'pegleg.engine.util.files.all', return_value=[p.strpath]):
autospec=True,
return_value=[p.strpath]):
result = lint.full( result = lint.full(
False, exclude_lint=exclude_lint, warn_lint=warn_lint) False, exclude_lint=exclude_lint, warn_lint=warn_lint)
assert len(result) == 1 assert len(result) == 1
@ -216,10 +213,8 @@ class TestSelectableLinting(object):
# Invalid YAML - will trigger error. # Invalid YAML - will trigger error.
p.write("---\nfoo: bar: baz") p.write("---\nfoo: bar: baz")
with mock.patch( with mock.patch('pegleg.engine.util.files.all', autospec=True,
'pegleg.engine.util.files.all', return_value=[p.strpath]):
autospec=True,
return_value=[p.strpath]):
result = lint.full( result = lint.full(
False, exclude_lint=exclude_lint, warn_lint=warn_lint) False, exclude_lint=exclude_lint, warn_lint=warn_lint)
assert len(result) == 1 assert len(result) == 1

View File

@ -46,14 +46,12 @@ def _site_definition(site_name):
 def _expected_document_names(site_name):
     EXPECTED_DOCUMENT_NAMES = [
-        'global-common',
-        'global-v1.0',
+        'global-common', 'global-v1.0',
         '%s-type-common' % site_name,
         '%s-type-v1.0' % site_name,
         _site_definition(site_name)["metadata"]["name"],
         '%s-chart' % site_name,
-        '%s-passphrase' % site_name,
-        'deployment-version'
+        '%s-passphrase' % site_name, 'deployment-version'
     ]
     return EXPECTED_DOCUMENT_NAMES
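
For readability, the list above expands, for a hypothetical site named 'cicd', to roughly:

    ['global-common', 'global-v1.0',
     'cicd-type-common', 'cicd-type-v1.0',
     <name from the cicd site-definition metadata>,
     'cicd-chart', 'cicd-passphrase', 'deployment-version']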

View File

@ -23,7 +23,7 @@ from pegleg.engine import exceptions
from pegleg.engine import repository from pegleg.engine import repository
from pegleg.engine import util from pegleg.engine import util
REPO_USERNAME="test_username" REPO_USERNAME = "test_username"
TEST_REPOSITORIES = { TEST_REPOSITORIES = {
'repositories': { 'repositories': {
@ -32,10 +32,10 @@ TEST_REPOSITORIES = {
'url': 'ssh://REPO_USERNAME@gerrit:29418/aic-clcp-manifests.git' 'url': 'ssh://REPO_USERNAME@gerrit:29418/aic-clcp-manifests.git'
}, },
'secrets': { 'secrets': {
'revision': 'revision': 'master',
'master', 'url': (
'url': ('ssh://REPO_USERNAME@gerrit:29418/aic-clcp-security-' 'ssh://REPO_USERNAME@gerrit:29418/aic-clcp-security-'
'manifests.git') 'manifests.git')
} }
} }
} }
@ -48,16 +48,17 @@ FORMATTED_REPOSITORIES = {
REPO_USERNAME) REPO_USERNAME)
}, },
'secrets': { 'secrets': {
'revision': 'revision': 'master',
'master', 'url': (
'url': ('ssh://{}@gerrit:29418/aic-clcp-security-' 'ssh://{}@gerrit:29418/aic-clcp-security-'
'manifests.git'.format(REPO_USERNAME)) 'manifests.git'.format(REPO_USERNAME))
} }
} }
} }
config.set_repo_username(REPO_USERNAME) config.set_repo_username(REPO_USERNAME)
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def clean_temp_folders(): def clean_temp_folders():
try: try:
@ -90,8 +91,8 @@ def _repo_name(repo_url):
return repo_name return repo_name
def _test_process_repositories_inner(site_name="test_site", def _test_process_repositories_inner(
expected_extra_repos=None): site_name="test_site", expected_extra_repos=None):
repository.process_repositories(site_name) repository.process_repositories(site_name)
actual_repo_list = config.get_extra_repo_list() actual_repo_list = config.get_extra_repo_list()
expected_repos = expected_extra_repos.get('repositories', {}) expected_repos = expected_extra_repos.get('repositories', {})
@ -102,12 +103,13 @@ def _test_process_repositories_inner(site_name="test_site",
assert any(repo_name in r for r in actual_repo_list) assert any(repo_name in r for r in actual_repo_list)
def _test_process_repositories(site_repo=None, def _test_process_repositories(
repo_username=None, site_repo=None,
repo_overrides=None, repo_username=None,
expected_repo_url=None, repo_overrides=None,
expected_repo_revision=None, expected_repo_url=None,
expected_repo_overrides=None): expected_repo_revision=None,
expected_repo_overrides=None):
"""Validate :func:`repository.process_repositories`. """Validate :func:`repository.process_repositories`.
:param site_repo: Primary site repository. :param site_repo: Primary site repository.
@ -144,21 +146,23 @@ def _test_process_repositories(site_repo=None,
ref=expected_repo_revision, ref=expected_repo_revision,
auth_key=None) auth_key=None)
] ]
mock_calls.extend([ mock_calls.extend(
mock.call(r['url'], ref=r['revision'], auth_key=None) [
for r in FORMATTED_REPOSITORIES['repositories'].values() mock.call(r['url'], ref=r['revision'], auth_key=None)
]) for r in FORMATTED_REPOSITORIES['repositories'].values()
])
m_clone_repo.assert_has_calls(mock_calls) m_clone_repo.assert_has_calls(mock_calls)
elif repo_username: elif repo_username:
# Validate that the REPO_USERNAME placeholder is replaced by # Validate that the REPO_USERNAME placeholder is replaced by
# repo_username. # repo_username.
m_clone_repo.assert_has_calls([ m_clone_repo.assert_has_calls(
mock.call( [
r['url'].replace('REPO_USERNAME', repo_username), mock.call(
ref=r['revision'], r['url'].replace('REPO_USERNAME', repo_username),
auth_key=None) ref=r['revision'],
for r in FORMATTED_REPOSITORIES['repositories'].values() auth_key=None)
]) for r in FORMATTED_REPOSITORIES['repositories'].values()
])
elif repo_overrides: elif repo_overrides:
# This is computed from: len(cloned extra repos) + # This is computed from: len(cloned extra repos) +
# len(cloned primary repo), which is len(cloned extra repos) + 1 # len(cloned primary repo), which is len(cloned extra repos) + 1
@ -176,31 +180,26 @@ def _test_process_repositories(site_repo=None,
ref = r['revision'] ref = r['revision']
m_clone_repo.assert_any_call(repo_url, ref=ref, auth_key=None) m_clone_repo.assert_any_call(repo_url, ref=ref, auth_key=None)
else: else:
m_clone_repo.assert_has_calls([ m_clone_repo.assert_has_calls(
mock.call(r['url'], ref=r['revision'], auth_key=None) [
for r in FORMATTED_REPOSITORIES['repositories'].values() mock.call(r['url'], ref=r['revision'], auth_key=None)
]) for r in FORMATTED_REPOSITORIES['repositories'].values()
])
if site_repo: if site_repo:
# Set a test site repo, call the test and clean up. # Set a test site repo, call the test and clean up.
with mock.patch.object( with mock.patch.object(config, 'get_site_repo', autospec=True,
config, 'get_site_repo', autospec=True, return_value=site_repo):
return_value=site_repo):
do_test() do_test()
elif repo_username: elif repo_username:
# Set a test repo username, call the test and clean up. # Set a test repo username, call the test and clean up.
with mock.patch.object( with mock.patch.object(config, 'get_repo_username', autospec=True,
config, return_value=repo_username):
'get_repo_username',
autospec=True,
return_value=repo_username):
do_test() do_test()
elif repo_overrides: elif repo_overrides:
with mock.patch.object( with mock.patch.object(config, 'get_extra_repo_overrides',
config, autospec=True,
'get_extra_repo_overrides', return_value=list(repo_overrides.values())):
autospec=True,
return_value=list(repo_overrides.values())):
do_test() do_test()
else: else:
do_test() do_test()
@ -263,8 +262,7 @@ def test_process_repositories_with_repo_username():
def test_process_repositories_with_repo_overrides_remote_urls(): def test_process_repositories_with_repo_overrides_remote_urls():
# Same URL, different revision (than TEST_REPOSITORIES). # Same URL, different revision (than TEST_REPOSITORIES).
overrides = { overrides = {
'global': 'global': 'global=ssh://REPO_USERNAME@gerrit:29418/aic-clcp-manifests.git@12345'
'global=ssh://REPO_USERNAME@gerrit:29418/aic-clcp-manifests.git@12345'
} }
expected_repo_overrides = { expected_repo_overrides = {
'global': { 'global': {
@ -320,10 +318,8 @@ def test_process_repositories_with_repo_overrides_local_paths():
def test_process_repositories_with_multiple_repo_overrides_remote_urls(): def test_process_repositories_with_multiple_repo_overrides_remote_urls():
overrides = { overrides = {
'global': 'global': 'global=ssh://gerrit:29418/aic-clcp-manifests.git@12345',
'global=ssh://gerrit:29418/aic-clcp-manifests.git@12345', 'secrets': 'secrets=ssh://gerrit:29418/aic-clcp-security-manifests.git@54321'
'secrets':
'secrets=ssh://gerrit:29418/aic-clcp-security-manifests.git@54321'
} }
expected_repo_overrides = { expected_repo_overrides = {
'global': { 'global': {
@ -376,23 +372,17 @@ def test_process_repositiories_extraneous_user_repo_value(m_log, *_):
repo_overrides = ['global=ssh://gerrit:29418/aic-clcp-manifests.git'] repo_overrides = ['global=ssh://gerrit:29418/aic-clcp-manifests.git']
# Provide a repo user value. # Provide a repo user value.
with mock.patch.object( with mock.patch.object(config, 'get_repo_username', autospec=True,
config, return_value='test_username'):
'get_repo_username',
autospec=True,
return_value='test_username'):
# Get rid of REPO_USERNAME through an override. # Get rid of REPO_USERNAME through an override.
with mock.patch.object( with mock.patch.object(config, 'get_extra_repo_overrides',
config, autospec=True, return_value=repo_overrides):
'get_extra_repo_overrides',
autospec=True,
return_value=repo_overrides):
_test_process_repositories_inner( _test_process_repositories_inner(
expected_extra_repos=TEST_REPOSITORIES) expected_extra_repos=TEST_REPOSITORIES)
msg = ("A repository username was specified but no REPO_USERNAME " msg = (
"string found in repository url %s", "A repository username was specified but no REPO_USERNAME "
repo_overrides[0].split('=')[-1]) "string found in repository url %s", repo_overrides[0].split('=')[-1])
m_log.warning.assert_any_call(*msg) m_log.warning.assert_any_call(*msg)
@ -436,19 +426,18 @@ def test_process_repositiories_no_site_def_repos_with_extraneous_overrides(
} }
# Provide repo overrides. # Provide repo overrides.
with mock.patch.object( with mock.patch.object(config, 'get_extra_repo_overrides', autospec=True,
config, return_value=repo_overrides):
'get_extra_repo_overrides',
autospec=True,
return_value=repo_overrides):
_test_process_repositories_inner( _test_process_repositories_inner(
site_name=site_name, expected_extra_repos=expected_overrides) site_name=site_name, expected_extra_repos=expected_overrides)
debug_msg = ("Repo override: %s not found under `repositories` for " debug_msg = (
"site-definition.yaml. Site def repositories: %s", "Repo override: %s not found under `repositories` for "
repo_overrides[0], "") "site-definition.yaml. Site def repositories: %s", repo_overrides[0],
info_msg = ("No repositories found in site-definition.yaml for site: %s. " "")
"Defaulting to specified repository overrides.", site_name) info_msg = (
"No repositories found in site-definition.yaml for site: %s. "
"Defaulting to specified repository overrides.", site_name)
m_log.debug.assert_any_call(*debug_msg) m_log.debug.assert_any_call(*debug_msg)
m_log.info.assert_any_call(*info_msg) m_log.info.assert_any_call(*info_msg)
@ -462,12 +451,13 @@ def test_process_repositories_without_repositories_key_in_site_definition(
m_log, *_): m_log, *_):
# Stub this out since default config site repo is '.' and local repo might # Stub this out since default config site repo is '.' and local repo might
# be dirty. # be dirty.
with mock.patch.object( with mock.patch.object(repository, '_handle_repository', autospec=True,
repository, '_handle_repository', autospec=True, return_value=''): return_value=''):
_test_process_repositories_inner( _test_process_repositories_inner(
site_name=mock.sentinel.site, expected_extra_repos={}) site_name=mock.sentinel.site, expected_extra_repos={})
msg = ("The repository for site_name: %s does not contain a " msg = (
"site-definition.yaml with a 'repositories' key") "The repository for site_name: %s does not contain a "
"site-definition.yaml with a 'repositories' key")
assert any(msg in x[1][0] for x in m_log.info.mock_calls) assert any(msg in x[1][0] for x in m_log.info.mock_calls)
@ -483,13 +473,14 @@ def test_process_extra_repositories_malformed_format_raises_exception(
# Will fail since it doesn't contain "=". # Will fail since it doesn't contain "=".
broken_repo_url = 'broken_url' broken_repo_url = 'broken_url'
m_get_extra_repo_overrides.return_value = [broken_repo_url] m_get_extra_repo_overrides.return_value = [broken_repo_url]
error = ("The repository %s must be in the form of " error = (
"name=repoUrl[@revision]" % broken_repo_url) "The repository %s must be in the form of "
"name=repoUrl[@revision]" % broken_repo_url)
# Stub this out since default config site repo is '.' and local repo might # Stub this out since default config site repo is '.' and local repo might
# be dirty. # be dirty.
with mock.patch.object( with mock.patch.object(repository, '_handle_repository', autospec=True,
repository, '_handle_repository', autospec=True, return_value=''): return_value=''):
with pytest.raises(click.ClickException) as exc: with pytest.raises(click.ClickException) as exc:
repository.process_repositories(mock.sentinel.site) repository.process_repositories(mock.sentinel.site)
assert error == str(exc.value) assert error == str(exc.value)
@ -500,11 +491,8 @@ def test_process_site_repository(_):
def _do_test(site_repo, expected): def _do_test(site_repo, expected):
config.set_site_repo(site_repo) config.set_site_repo(site_repo)
with mock.patch.object( with mock.patch.object(repository, '_handle_repository', autospec=True,
repository, side_effect=lambda x, *a, **k: x):
'_handle_repository',
autospec=True,
side_effect=lambda x, *a, **k: x):
result = repository.process_site_repository() result = repository.process_site_repository()
assert os.path.normpath(expected) == os.path.normpath(result) assert os.path.normpath(expected) == os.path.normpath(result)
@ -532,21 +520,14 @@ def test_process_site_repository(_):
def test_format_url_with_repo_username(): def test_format_url_with_repo_username():
TEST_URL = 'ssh://REPO_USERNAME@gerrit:29418/airship/pegleg' TEST_URL = 'ssh://REPO_USERNAME@gerrit:29418/airship/pegleg'
with mock.patch.object( with mock.patch.object(config, 'get_repo_username', autospec=True,
config, return_value=REPO_USERNAME):
'get_repo_username',
autospec=True,
return_value=REPO_USERNAME):
res = repository._format_url_with_repo_username(TEST_URL) res = repository._format_url_with_repo_username(TEST_URL)
assert res == 'ssh://{}@gerrit:29418/airship/pegleg'.format( assert res == 'ssh://{}@gerrit:29418/airship/pegleg'.format(
REPO_USERNAME) REPO_USERNAME)
with mock.patch.object( with mock.patch.object(config, 'get_repo_username', autospec=True,
config, return_value=''):
'get_repo_username',
autospec=True,
return_value=''):
pytest.raises( pytest.raises(
exceptions.GitMissingUserException, exceptions.GitMissingUserException,
repository._format_url_with_repo_username, repository._format_url_with_repo_username, TEST_URL)
TEST_URL)

View File

@ -21,6 +21,7 @@ from tests.unit.fixtures import create_tmp_deployment_files
TEST_DATA = [('/tmp/test_repo', 'test_file.yaml')] TEST_DATA = [('/tmp/test_repo', 'test_file.yaml')]
TEST_DATA_2 = [{'schema': 'pegleg/SiteDefinition/v1', 'data': 'test'}] TEST_DATA_2 = [{'schema': 'pegleg/SiteDefinition/v1', 'data': 'test'}]
def test_no_non_yamls(tmpdir): def test_no_non_yamls(tmpdir):
p = tmpdir.mkdir("deployment_files").mkdir("global") p = tmpdir.mkdir("deployment_files").mkdir("global")
for x in range(3): # Create 3 YAML files for x in range(3): # Create 3 YAML files
@ -36,30 +37,34 @@ def test_no_non_yamls(tmpdir):
 def test_list_all_files(create_tmp_deployment_files):
-    expected_files = sorted([
-        'deployment_files/global/common/global-common.yaml',
-        'deployment_files/global/v1.0/global-v1.0.yaml',
-        'deployment_files/type/cicd/common/cicd-type-common.yaml',
-        'deployment_files/type/cicd/v1.0/cicd-type-v1.0.yaml',
-        'deployment_files/type/lab/common/lab-type-common.yaml',
-        'deployment_files/type/lab/v1.0/lab-type-v1.0.yaml',
-        'deployment_files/site/cicd/secrets/passphrases/cicd-passphrase.yaml',
-        'deployment_files/site/cicd/site-definition.yaml',
-        'deployment_files/site/cicd/software/charts/cicd-chart.yaml',
-        'deployment_files/site/lab/secrets/passphrases/lab-passphrase.yaml',
-        'deployment_files/site/lab/site-definition.yaml',
-        'deployment_files/site/lab/software/charts/lab-chart.yaml',
-    ])
+    expected_files = sorted(
+        [
+            'deployment_files/global/common/global-common.yaml',
+            'deployment_files/global/v1.0/global-v1.0.yaml',
+            'deployment_files/type/cicd/common/cicd-type-common.yaml',
+            'deployment_files/type/cicd/v1.0/cicd-type-v1.0.yaml',
+            'deployment_files/type/lab/common/lab-type-common.yaml',
+            'deployment_files/type/lab/v1.0/lab-type-v1.0.yaml',
+            'deployment_files/site/cicd/secrets/passphrases/cicd-passphrase.yaml',
+            'deployment_files/site/cicd/site-definition.yaml',
+            'deployment_files/site/cicd/software/charts/cicd-chart.yaml',
+            'deployment_files/site/lab/secrets/passphrases/lab-passphrase.yaml',
+            'deployment_files/site/lab/site-definition.yaml',
+            'deployment_files/site/lab/software/charts/lab-chart.yaml',
+        ])
     actual_files = sorted(files.all())
     assert len(actual_files) == len(expected_files)
     for idx, file in enumerate(actual_files):
         assert file.endswith(expected_files[idx])
-@mock.patch('pegleg.engine.util.definition.site_files_by_repo',autospec=True,
-            return_value=TEST_DATA)
-@mock.patch('pegleg.engine.util.files.read', autospec=True,
-            return_value=TEST_DATA_2)
+@mock.patch(
+    'pegleg.engine.util.definition.site_files_by_repo',
+    autospec=True,
+    return_value=TEST_DATA)
+@mock.patch(
+    'pegleg.engine.util.files.read', autospec=True, return_value=TEST_DATA_2)
 def test_collect_files_by_repo(*args):
     result = files.collect_files_by_repo('test-site')

View File

@ -21,6 +21,7 @@ def test_cryptostring_default_len():
s = s_util.get_crypto_string() s = s_util.get_crypto_string()
assert len(s) == 24 assert len(s) == 24
def test_cryptostring_short_len(): def test_cryptostring_short_len():
s_util = CryptoString() s_util = CryptoString()
s = s_util.get_crypto_string(0) s = s_util.get_crypto_string(0)
@ -30,6 +31,7 @@ def test_cryptostring_short_len():
s = s_util.get_crypto_string(-1) s = s_util.get_crypto_string(-1)
assert len(s) == 24 assert len(s) == 24
def test_cryptostring_long_len(): def test_cryptostring_long_len():
s_util = CryptoString() s_util = CryptoString()
s = s_util.get_crypto_string(25) s = s_util.get_crypto_string(25)
@ -37,6 +39,7 @@ def test_cryptostring_long_len():
s = s_util.get_crypto_string(128) s = s_util.get_crypto_string(128)
assert len(s) == 128 assert len(s) == 128
def test_cryptostring_has_upper(): def test_cryptostring_has_upper():
s_util = CryptoString() s_util = CryptoString()
crypto_string = 'Th1sP@sswordH4sUppers!' crypto_string = 'Th1sP@sswordH4sUppers!'
@ -46,6 +49,7 @@ def test_cryptostring_has_upper():
crypto_string = 'th1sp@sswordh4snouppers!' crypto_string = 'th1sp@sswordh4snouppers!'
assert s_util.has_upper(crypto_string) is False assert s_util.has_upper(crypto_string) is False
def test_cryptostring_has_lower(): def test_cryptostring_has_lower():
s_util = CryptoString() s_util = CryptoString()
crypto_string = 'Th1sP@sswordH4sLowers!' crypto_string = 'Th1sP@sswordH4sLowers!'
@ -55,6 +59,7 @@ def test_cryptostring_has_lower():
crypto_string = 'TH1SP@SSWORDH4SNOLOWERS!' crypto_string = 'TH1SP@SSWORDH4SNOLOWERS!'
assert s_util.has_lower(crypto_string) is False assert s_util.has_lower(crypto_string) is False
def test_cryptostring_has_number(): def test_cryptostring_has_number():
s_util = CryptoString() s_util = CryptoString()
crypto_string = 'Th1sP@sswordH4sNumbers!' crypto_string = 'Th1sP@sswordH4sNumbers!'
@ -64,6 +69,7 @@ def test_cryptostring_has_number():
crypto_string = 'ThisP@sswordHasNoNumbers!' crypto_string = 'ThisP@sswordHasNoNumbers!'
assert s_util.has_number(crypto_string) is False assert s_util.has_number(crypto_string) is False
def test_cryptostring_has_symbol(): def test_cryptostring_has_symbol():
s_util = CryptoString() s_util = CryptoString()
crypto_string = 'Th1sP@sswordH4sSymbols!' crypto_string = 'Th1sP@sswordH4sSymbols!'
@ -73,6 +79,7 @@ def test_cryptostring_has_symbol():
crypto_string = 'ThisPasswordH4sNoSymbols' crypto_string = 'ThisPasswordH4sNoSymbols'
assert s_util.has_symbol(crypto_string) is False assert s_util.has_symbol(crypto_string) is False
def test_cryptostring_has_all(): def test_cryptostring_has_all():
s_util = CryptoString() s_util = CryptoString()
crypto_string = s_util.get_crypto_string() crypto_string = s_util.get_crypto_string()
@ -86,4 +93,4 @@ def test_cryptostring_has_all():
crypto_string = 'ThisP@sswordHasNoNumbers!' crypto_string = 'ThisP@sswordHasNoNumbers!'
assert s_util.validate_crypto_str(crypto_string) is False assert s_util.validate_crypto_str(crypto_string) is False
crypto_string = 'ThisPasswordH4sNoSymbols' crypto_string = 'ThisPasswordH4sNoSymbols'
assert s_util.validate_crypto_str(crypto_string) is False assert s_util.validate_crypto_str(crypto_string) is False

View File

@ -33,8 +33,9 @@ class TestSiteDefinitionHelpers(object):
elif name.startswith(sitename): elif name.startswith(sitename):
site_documents.append(document) site_documents.append(document)
else: else:
raise AssertionError("Unexpected document retrieved by " raise AssertionError(
"`documents_for_site`: %s" % document) "Unexpected document retrieved by "
"`documents_for_site`: %s" % document)
# Assert that documents from both levels appear. # Assert that documents from both levels appear.
assert global_documents assert global_documents
@ -60,7 +61,9 @@ class TestSiteDefinitionHelpers(object):
# Validate that each set of site documents matches the same set of # Validate that each set of site documents matches the same set of
# documents returned by ``documents_for_site`` for that site. # documents returned by ``documents_for_site`` for that site.
assert (sorted(cicd_documents, key=sort_func) == sorted( assert (
documents_by_site["cicd"], key=sort_func)) sorted(cicd_documents, key=sort_func) == sorted(
assert (sorted(lab_documents, key=sort_func) == sorted( documents_by_site["cicd"], key=sort_func))
documents_by_site["lab"], key=sort_func)) assert (
sorted(lab_documents, key=sort_func) == sorted(
documents_by_site["lab"], key=sort_func))

View File

@ -28,8 +28,9 @@ EXPECTED_DIR_PERM = '0o750'
class TestFileHelpers(object): class TestFileHelpers(object):
def test_read_compatible_file(self, create_tmp_deployment_files): def test_read_compatible_file(self, create_tmp_deployment_files):
path = os.path.join(config.get_site_repo(), 'site', 'cicd', 'secrets', path = os.path.join(
'passphrases', 'cicd-passphrase.yaml') config.get_site_repo(), 'site', 'cicd', 'secrets', 'passphrases',
'cicd-passphrase.yaml')
documents = files.read(path) documents = files.read(path)
assert 1 == len(documents) assert 1 == len(documents)
@ -38,15 +39,16 @@ class TestFileHelpers(object):
# Deckhand-formatted document currently but probably shouldn't be, # Deckhand-formatted document currently but probably shouldn't be,
# because it has no business being in Deckhand. As such, validate that # because it has no business being in Deckhand. As such, validate that
# it is ignored. # it is ignored.
path = os.path.join(config.get_site_repo(), 'site', 'cicd', path = os.path.join(
'site-definition.yaml') config.get_site_repo(), 'site', 'cicd', 'site-definition.yaml')
documents = files.read(path) documents = files.read(path)
assert not documents, ("Documents returned should be empty for " assert not documents, (
"site-definition.yaml") "Documents returned should be empty for "
"site-definition.yaml")
def test_write(self, create_tmp_deployment_files): def test_write(self, create_tmp_deployment_files):
path = os.path.join(config.get_site_repo(), 'site', 'cicd', path = os.path.join(
'test_out.yaml') config.get_site_repo(), 'site', 'cicd', 'test_out.yaml')
files.write("test text", path) files.write("test text", path)
with open(path, "r") as out_fi: with open(path, "r") as out_fi:
assert out_fi.read() == "test text" assert out_fi.read() == "test text"
@ -63,8 +65,8 @@ class TestFileHelpers(object):
files.write(object(), path) files.write(object(), path)
def test_file_permissions(self, create_tmp_deployment_files): def test_file_permissions(self, create_tmp_deployment_files):
path = os.path.join(config.get_site_repo(), 'site', 'cicd', path = os.path.join(
'test_out.yaml') config.get_site_repo(), 'site', 'cicd', 'test_out.yaml')
files.write("test text", path) files.write("test text", path)
assert oct(os.stat(path).st_mode & 0o777) == EXPECTED_FILE_PERM assert oct(os.stat(path).st_mode & 0o777) == EXPECTED_FILE_PERM

View File

@ -93,8 +93,8 @@ def test_git_clone_with_commit_reference():
reason='git clone requires network connectivity.') reason='git clone requires network connectivity.')
def test_git_clone_with_patch_ref(): def test_git_clone_with_patch_ref():
ref = 'refs/changes/54/457754/73' ref = 'refs/changes/54/457754/73'
git_dir = git.git_handler('https://review.opendev.org/openstack/openstack-helm', git_dir = git.git_handler(
ref) 'https://review.opendev.org/openstack/openstack-helm', ref)
_validate_git_clone(git_dir, ref) _validate_git_clone(git_dir, ref)
@ -110,8 +110,8 @@ def test_git_clone_behind_proxy(mock_log):
git_dir = git.git_handler(url, commit, proxy_server=proxy_server) git_dir = git.git_handler(url, commit, proxy_server=proxy_server)
_validate_git_clone(git_dir, commit) _validate_git_clone(git_dir, commit)
mock_log.debug.assert_any_call('Cloning [%s] with proxy [%s]', url, mock_log.debug.assert_any_call(
proxy_server) 'Cloning [%s] with proxy [%s]', url, proxy_server)
mock_log.debug.reset_mock() mock_log.debug.reset_mock()
@ -443,8 +443,7 @@ def test_git_clone_fake_proxy():
@mock.patch('os.path.exists', return_value=True, autospec=True) @mock.patch('os.path.exists', return_value=True, autospec=True)
def test_git_clone_ssh_auth_method_fails_auth(_): def test_git_clone_ssh_auth_method_fails_auth(_):
fake_user = test_utils.rand_name('fake_user') fake_user = test_utils.rand_name('fake_user')
url = ('ssh://%s@review.opendev.org:29418/airship/armada' % url = ('ssh://%s@review.opendev.org:29418/airship/armada' % fake_user)
fake_user)
with pytest.raises(exceptions.GitAuthException): with pytest.raises(exceptions.GitAuthException):
git._try_git_clone( git._try_git_clone(
url, ref='refs/changes/17/388517/5', auth_key='/home/user/.ssh/') url, ref='refs/changes/17/388517/5', auth_key='/home/user/.ssh/')
@ -456,8 +455,7 @@ def test_git_clone_ssh_auth_method_fails_auth(_):
@mock.patch('os.path.exists', return_value=False, autospec=True) @mock.patch('os.path.exists', return_value=False, autospec=True)
def test_git_clone_ssh_auth_method_missing_ssh_key(_): def test_git_clone_ssh_auth_method_missing_ssh_key(_):
fake_user = test_utils.rand_name('fake_user') fake_user = test_utils.rand_name('fake_user')
url = ('ssh://%s@review.opendev.org:29418/airship/armada' % url = ('ssh://%s@review.opendev.org:29418/airship/armada' % fake_user)
fake_user)
with pytest.raises(exceptions.GitSSHException): with pytest.raises(exceptions.GitSSHException):
git.git_handler( git.git_handler(
url, ref='refs/changes/17/388517/5', auth_key='/home/user/.ssh/') url, ref='refs/changes/17/388517/5', auth_key='/home/user/.ssh/')

View File

@ -24,33 +24,60 @@ from pegleg.engine.util.shipyard_helper import ShipyardClient
 # Dummy data to be used as collected documents
 DATA = {
-    'test-repo':
-        [{'schema': 'pegleg/SiteDefinition/v1',
-          'metadata': {'schema': 'metadata/Document/v1',
-                       'layeringDefinition': {'abstract': False,
-                                              'layer': 'site'},
-                       'name': 'site-name',
-                       'storagePolicy': 'cleartext'},
-          'data': {'site_type': 'foundry'}}]}
+    'test-repo': [
+        {
+            'schema': 'pegleg/SiteDefinition/v1',
+            'metadata': {
+                'schema': 'metadata/Document/v1',
+                'layeringDefinition': {
+                    'abstract': False,
+                    'layer': 'site'
+                },
+                'name': 'site-name',
+                'storagePolicy': 'cleartext'
+            },
+            'data': {
+                'site_type': 'foundry'
+            }
+        }
+    ]
+}
 MULTI_REPO_DATA = {
-    'repo1':
-        [{'schema': 'pegleg/SiteDefinition/v1',
-          'metadata': {'schema': 'metadata/Document/v1',
-                       'layeringDefinition': {'abstract': False,
-                                              'layer': 'site'},
-                       'name': 'site-name',
-                       'storagePolicy': 'cleartext'},
-          'data': {'site_type': 'foundry'}}],
-    'repo2':
-        [{'schema': 'pegleg/SiteDefinition/v1',
-          'metadata': {'schema': 'metadata/Document/v1',
-                       'layeringDefinition': {'abstract': False,
-                                              'layer': 'site'},
-                       'name': 'site-name',
-                       'storagePolicy': 'cleartext'},
-          'data': {'site_type': 'foundry'}}]
+    'repo1': [
+        {
+            'schema': 'pegleg/SiteDefinition/v1',
+            'metadata': {
+                'schema': 'metadata/Document/v1',
+                'layeringDefinition': {
+                    'abstract': False,
+                    'layer': 'site'
+                },
+                'name': 'site-name',
+                'storagePolicy': 'cleartext'
+            },
+            'data': {
+                'site_type': 'foundry'
+            }
+        }
+    ],
+    'repo2': [
+        {
+            'schema': 'pegleg/SiteDefinition/v1',
+            'metadata': {
+                'schema': 'metadata/Document/v1',
+                'layeringDefinition': {
+                    'abstract': False,
+                    'layer': 'site'
+                },
+                'name': 'site-name',
+                'storagePolicy': 'cleartext'
+            },
+            'data': {
+                'site_type': 'foundry'
+            }
+        }
+    ]
 }
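
A minimal sketch of walking this structure the way a consumer might (the helper below is hypothetical; the real upload path lives in ShipyardHelper, which these tests mock around):

    def _iter_collected(collected):
        # `collected` maps a repository name to its list of rendered
        # documents, matching the shape of DATA and MULTI_REPO_DATA above.
        for repo_name, documents in collected.items():
            for document in documents:
                yield repo_name, document['schema'], document['metadata']['name']

    assert list(_iter_collected(DATA)) == [
        ('test-repo', 'pegleg/SiteDefinition/v1', 'site-name')
    ]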
@ -79,9 +106,7 @@ def _get_context():
'password': 'passwordTest', 'password': 'passwordTest',
'auth_url': 'urlTest' 'auth_url': 'urlTest'
} }
ctx.obj['API_PARAMETERS'] = { ctx.obj['API_PARAMETERS'] = {'auth_vars': auth_vars}
'auth_vars': auth_vars
}
ctx.obj['context_marker'] = '88888888-4444-4444-4444-121212121212' ctx.obj['context_marker'] = '88888888-4444-4444-4444-121212121212'
ctx.obj['site_name'] = 'test-site' ctx.obj['site_name'] = 'test-site'
ctx.obj['collection'] = 'test-site' ctx.obj['collection'] = 'test-site'
@ -99,9 +124,7 @@ def _get_bad_context():
'password': 'passwordTest', 'password': 'passwordTest',
'auth_url': None 'auth_url': None
} }
ctx.obj['API_PARAMETERS'] = { ctx.obj['API_PARAMETERS'] = {'auth_vars': auth_vars}
'auth_vars': auth_vars
}
ctx.obj['context_marker'] = '88888888-4444-4444-4444-121212121212' ctx.obj['context_marker'] = '88888888-4444-4444-4444-121212121212'
ctx.obj['site_name'] = 'test-site' ctx.obj['site_name'] = 'test-site'
ctx.obj['collection'] = None ctx.obj['collection'] = None
@ -131,14 +154,20 @@ def test_shipyard_helper_init_():
assert isinstance(shipyard_helper.api_client, ShipyardClient) assert isinstance(shipyard_helper.api_client, ShipyardClient)
@mock.patch('pegleg.engine.util.files.collect_files_by_repo', autospec=True, @mock.patch(
return_value=MULTI_REPO_DATA) 'pegleg.engine.util.files.collect_files_by_repo',
@mock.patch.object(ShipyardHelper, 'formatted_response_handler', autospec=True,
autospec=True, return_value=None) return_value=MULTI_REPO_DATA)
@mock.patch.dict(os.environ, { @mock.patch.object(
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC', ShipyardHelper,
'PEGLEG_SALT': 'MySecretSalt1234567890][' 'formatted_response_handler',
}) autospec=True,
return_value=None)
@mock.patch.dict(
os.environ, {
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
'PEGLEG_SALT': 'MySecretSalt1234567890]['
})
def test_upload_documents(*args): def test_upload_documents(*args):
""" Tests upload document """ """ Tests upload document """
# Scenario: # Scenario:
@ -164,14 +193,20 @@ def test_upload_documents(*args):
mock_api_client.post_configdocs.assert_called_once() mock_api_client.post_configdocs.assert_called_once()
@mock.patch('pegleg.engine.util.files.collect_files_by_repo', autospec=True, @mock.patch(
return_value=DATA) 'pegleg.engine.util.files.collect_files_by_repo',
@mock.patch.object(ShipyardHelper, 'formatted_response_handler', autospec=True,
autospec=True, return_value=None) return_value=DATA)
@mock.patch.dict(os.environ, { @mock.patch.object(
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC', ShipyardHelper,
'PEGLEG_SALT': 'MySecretSalt1234567890][' 'formatted_response_handler',
}) autospec=True,
return_value=None)
@mock.patch.dict(
os.environ, {
'PEGLEG_PASSPHRASE': 'ytrr89erARAiPE34692iwUMvWqqBvC',
'PEGLEG_SALT': 'MySecretSalt1234567890]['
})
def test_upload_documents_fail(*args): def test_upload_documents_fail(*args):
""" Tests Document upload error """ """ Tests Document upload error """
# Scenario: # Scenario:
@ -191,10 +226,15 @@ def test_upload_documents_fail(*args):
ShipyardHelper(context).upload_documents() ShipyardHelper(context).upload_documents()
@mock.patch('pegleg.engine.util.files.collect_files_by_repo', autospec=True, @mock.patch(
return_value=DATA) 'pegleg.engine.util.files.collect_files_by_repo',
@mock.patch.object(ShipyardHelper, 'formatted_response_handler', autospec=True,
autospec=True, return_value=None) return_value=DATA)
@mock.patch.object(
ShipyardHelper,
'formatted_response_handler',
autospec=True,
return_value=None)
def test_fail_auth(*args): def test_fail_auth(*args):
""" Tests Auth Failure """ """ Tests Auth Failure """
# Scenario: # Scenario:
@ -209,8 +249,11 @@ def test_fail_auth(*args):
ShipyardHelper(context).validate_auth_vars() ShipyardHelper(context).validate_auth_vars()
@mock.patch.object(ShipyardHelper, 'formatted_response_handler', @mock.patch.object(
autospec=True, return_value=None) ShipyardHelper,
'formatted_response_handler',
autospec=True,
return_value=None)
def test_commit_documents(*args): def test_commit_documents(*args):
"""Tests commit document """ """Tests commit document """
# Scenario: # Scenario:

View File

@ -81,14 +81,14 @@ def create_tmp_deployment_files(tmpdir):
'directories': { 'directories': {
'common': { 'common': {
'files': { 'files': {
'global-common.yaml': 'global-common.yaml': _gen_document(
_gen_document(name="global-common", layer='global') name="global-common", layer='global')
} }
}, },
'v1.0': { 'v1.0': {
'files': { 'files': {
'global-v1.0.yaml': 'global-v1.0.yaml': _gen_document(
_gen_document(name="global-v1.0", layer='global') name="global-v1.0", layer='global')
} }
} }
} }
@ -103,15 +103,13 @@ def create_tmp_deployment_files(tmpdir):
'directories': { 'directories': {
'common': { 'common': {
'files': { 'files': {
'%s-type-common.yaml' % site: '%s-type-common.yaml' % site: _gen_document(
_gen_document(
name="%s-type-common" % site, layer='type') name="%s-type-common" % site, layer='type')
} }
}, },
'v1.0': { 'v1.0': {
'files': { 'files': {
'%s-type-v1.0.yaml' % site: '%s-type-v1.0.yaml' % site: _gen_document(
_gen_document(
name="%s-type-v1.0" % site, layer='type') name="%s-type-v1.0" % site, layer='type')
} }
} }
@ -142,13 +140,13 @@ schema: pegleg/SiteDefinition/v1
test_structure = SITE_TEST_STRUCTURE.copy() test_structure = SITE_TEST_STRUCTURE.copy()
test_structure['directories']['secrets']['directories']['passphrases'][ test_structure['directories']['secrets']['directories']['passphrases'][
'files'] = { 'files'] = {
'%s-passphrase.yaml' % site: '%s-passphrase.yaml' % site: _gen_document(
_gen_document(name="%s-passphrase" % site, layer='site') name="%s-passphrase" % site, layer='site')
} }
test_structure['directories']['software']['directories']['charts'][ test_structure['directories']['software']['directories']['charts'][
'files'] = { 'files'] = {
'%s-chart.yaml' % site: '%s-chart.yaml' % site: _gen_document(
_gen_document(name="%s-chart" % site, layer='site') name="%s-chart" % site, layer='site')
} }
test_structure['files']['site-definition.yaml'] = yaml.safe_load( test_structure['files']['site-definition.yaml'] = yaml.safe_load(
site_definition) site_definition)

View File

@ -46,8 +46,9 @@ DEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF
""" """
@pytest.mark.skipif(not test_utils.is_connected(), @pytest.mark.skipif(
reason='git clone requires network connectivity.') not test_utils.is_connected(),
reason='git clone requires network connectivity.')
class BaseCLIActionTest(object): class BaseCLIActionTest(object):
"""Tests end-to-end flows for all Pegleg CLI actions, with minimal mocking. """Tests end-to-end flows for all Pegleg CLI actions, with minimal mocking.
@ -73,8 +74,8 @@ class BaseCLIActionTest(object):
cls.repo_rev = TEST_PARAMS["repo_rev"] cls.repo_rev = TEST_PARAMS["repo_rev"]
cls.repo_name = TEST_PARAMS["repo_name"] cls.repo_name = TEST_PARAMS["repo_name"]
cls.treasuremap_path = git.git_handler(TEST_PARAMS["repo_url"], cls.treasuremap_path = git.git_handler(
ref=TEST_PARAMS["repo_rev"]) TEST_PARAMS["repo_url"], ref=TEST_PARAMS["repo_rev"])
class TestSiteCLIOptions(BaseCLIActionTest): class TestSiteCLIOptions(BaseCLIActionTest):
@ -93,8 +94,8 @@ class TestSiteCLIOptions(BaseCLIActionTest):
# 1) List sites (should clone repo automatically to `clone_path` # 1) List sites (should clone repo automatically to `clone_path`
# location if `clone_path` is set) # location if `clone_path` is set)
repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name, repo_url = 'https://opendev.org/airship/%s@%s' % (
self.repo_rev) self.repo_name, self.repo_rev)
# Note that the -p option is used to specify the clone_folder # Note that the -p option is used to specify the clone_folder
site_list = self.runner.invoke( site_list = self.runner.invoke(
@ -143,8 +144,8 @@ class TestSiteCLIOptionsNegative(BaseCLIActionTest):
# 1) List sites (should clone repo automatically to `clone_path` # 1) List sites (should clone repo automatically to `clone_path`
# location if `clone_path` is set) # location if `clone_path` is set)
repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name, repo_url = 'https://opendev.org/airship/%s@%s' % (
self.repo_rev) self.repo_name, self.repo_rev)
# Note that the -p option is used to specify the clone_folder # Note that the -p option is used to specify the clone_folder
site_list = self.runner.invoke( site_list = self.runner.invoke(
@ -170,10 +171,11 @@ class TestSiteCliActions(BaseCLIActionTest):
### Collect tests ### ### Collect tests ###
def _validate_collect_site_action(self, repo_path_or_url, save_location): def _validate_collect_site_action(self, repo_path_or_url, save_location):
result = self.runner.invoke(cli.site, [ result = self.runner.invoke(
'-r', repo_path_or_url, 'collect', self.site_name, '-s', cli.site, [
save_location '-r', repo_path_or_url, 'collect', self.site_name, '-s',
]) save_location
])
collected_files = os.listdir(save_location) collected_files = os.listdir(save_location)
@ -191,8 +193,8 @@ class TestSiteCliActions(BaseCLIActionTest):
# 2) Collect into save location (should clone repo automatically) # 2) Collect into save location (should clone repo automatically)
# 3) Check that expected file name is there # 3) Check that expected file name is there
repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name, repo_url = 'https://opendev.org/airship/%s@%s' % (
self.repo_rev) self.repo_name, self.repo_rev)
self._validate_collect_site_action(repo_url, temp_path) self._validate_collect_site_action(repo_url, temp_path)
def test_collect_using_remote_repo_url_ending_with_dot_git( def test_collect_using_remote_repo_url_ending_with_dot_git(
@ -204,8 +206,8 @@ class TestSiteCliActions(BaseCLIActionTest):
# 2) Collect into save location (should clone repo automatically) # 2) Collect into save location (should clone repo automatically)
# 3) Check that expected file name is there # 3) Check that expected file name is there
repo_url = 'https://opendev.org/airship/%s@%s.git' % (self.repo_name, repo_url = 'https://opendev.org/airship/%s@%s.git' % (
self.repo_rev) self.repo_name, self.repo_rev)
self._validate_collect_site_action(repo_url, temp_path) self._validate_collect_site_action(repo_url, temp_path)
def test_collect_using_local_path(self, temp_path): def test_collect_using_local_path(self, temp_path):
@ -232,8 +234,8 @@ class TestSiteCliActions(BaseCLIActionTest):
with mock.patch('pegleg.engine.site.util.deckhand') as mock_deckhand: with mock.patch('pegleg.engine.site.util.deckhand') as mock_deckhand:
mock_deckhand.deckhand_render.return_value = ([], []) mock_deckhand.deckhand_render.return_value = ([], [])
result = self.runner.invoke(cli.site, result = self.runner.invoke(
lint_command + exclude_lint_command) cli.site, lint_command + exclude_lint_command)
assert result.exit_code == 0, result.output assert result.exit_code == 0, result.output
@ -251,8 +253,8 @@ class TestSiteCliActions(BaseCLIActionTest):
# 1) Mock out Deckhand render (so we can ignore P005 issues) # 1) Mock out Deckhand render (so we can ignore P005 issues)
# 2) Lint site with exclude flags (should clone repo automatically) # 2) Lint site with exclude flags (should clone repo automatically)
repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name, repo_url = 'https://opendev.org/airship/%s@%s' % (
self.repo_rev) self.repo_name, self.repo_rev)
self._test_lint_site_action(repo_url, exclude=True) self._test_lint_site_action(repo_url, exclude=True)
def test_lint_site_using_local_path_with_exclude(self): def test_lint_site_using_local_path_with_exclude(self):
@ -294,8 +296,8 @@ class TestSiteCliActions(BaseCLIActionTest):
# #
# 1) List sites (should clone repo automatically) # 1) List sites (should clone repo automatically)
repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name, repo_url = 'https://opendev.org/airship/%s@%s' % (
self.repo_rev) self.repo_name, self.repo_rev)
self._validate_list_site_action(repo_url, temp_path) self._validate_list_site_action(repo_url, temp_path)
@ -312,9 +314,11 @@ class TestSiteCliActions(BaseCLIActionTest):
def _validate_site_show_action(self, repo_path_or_url, temp_path): def _validate_site_show_action(self, repo_path_or_url, temp_path):
mock_output = os.path.join(temp_path, 'output') mock_output = os.path.join(temp_path, 'output')
result = self.runner.invoke(cli.site, [ result = self.runner.invoke(
'-r', repo_path_or_url, 'show', self.site_name, '-o', mock_output cli.site, [
]) '-r', repo_path_or_url, 'show', self.site_name, '-o',
mock_output
])
assert result.exit_code == 0, result.output assert result.exit_code == 0, result.output
with open(mock_output, 'r') as f: with open(mock_output, 'r') as f:
@ -327,8 +331,8 @@ class TestSiteCliActions(BaseCLIActionTest):
# #
# 1) Show site (should clone repo automatically) # 1) Show site (should clone repo automatically)
repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name, repo_url = 'https://opendev.org/airship/%s@%s' % (
self.repo_rev) self.repo_name, self.repo_rev)
self._validate_site_show_action(repo_url, temp_path) self._validate_site_show_action(repo_url, temp_path)
def test_show_site_using_local_path(self, temp_path): def test_show_site_using_local_path(self, temp_path):
@ -361,8 +365,8 @@ class TestSiteCliActions(BaseCLIActionTest):
# 1) Mock out Deckhand render (so we can ignore P005 issues) # 1) Mock out Deckhand render (so we can ignore P005 issues)
# 2) Render site (should clone repo automatically) # 2) Render site (should clone repo automatically)
repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name, repo_url = 'https://opendev.org/airship/%s@%s' % (
self.repo_rev) self.repo_name, self.repo_rev)
self._validate_render_site_action(repo_url) self._validate_render_site_action(repo_url)
def test_render_site_using_local_path(self): def test_render_site_using_local_path(self):
@ -387,10 +391,11 @@ class TestSiteCliActions(BaseCLIActionTest):
repo_path = self.treasuremap_path repo_path = self.treasuremap_path
with mock.patch('pegleg.cli.ShipyardHelper') as mock_obj: with mock.patch('pegleg.cli.ShipyardHelper') as mock_obj:
result = self.runner.invoke(cli.site, [ result = self.runner.invoke(
'-r', repo_path, 'upload', self.site_name, '--collection', cli.site, [
'collection' '-r', repo_path, 'upload', self.site_name, '--collection',
]) 'collection'
])
assert result.exit_code == 0 assert result.exit_code == 0
mock_obj.assert_called_once() mock_obj.assert_called_once()
@ -435,8 +440,8 @@ class TestRepoCliActions(BaseCLIActionTest):
# 1) Mock out Deckhand render (so we can ignore P005 issues) # 1) Mock out Deckhand render (so we can ignore P005 issues)
# 2) Lint repo with exclude flags (should clone repo automatically) # 2) Lint repo with exclude flags (should clone repo automatically)
repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name, repo_url = 'https://opendev.org/airship/%s@%s' % (
self.repo_rev) self.repo_name, self.repo_rev)
lint_command = ['-r', repo_url, 'lint'] lint_command = ['-r', repo_url, 'lint']
exclude_lint_command = [ exclude_lint_command = [
@ -446,8 +451,8 @@ class TestRepoCliActions(BaseCLIActionTest):
with mock.patch('pegleg.engine.site.util.deckhand') as mock_deckhand: with mock.patch('pegleg.engine.site.util.deckhand') as mock_deckhand:
mock_deckhand.deckhand_render.return_value = ([], []) mock_deckhand.deckhand_render.return_value = ([], [])
result = self.runner.invoke(cli.repo, result = self.runner.invoke(
lint_command + exclude_lint_command) cli.repo, lint_command + exclude_lint_command)
assert result.exit_code == 0, result.output assert result.exit_code == 0, result.output
# A successful result (while setting lint checks to exclude) should # A successful result (while setting lint checks to exclude) should
@ -470,8 +475,8 @@ class TestRepoCliActions(BaseCLIActionTest):
with mock.patch('pegleg.engine.site.util.deckhand') as mock_deckhand: with mock.patch('pegleg.engine.site.util.deckhand') as mock_deckhand:
mock_deckhand.deckhand_render.return_value = ([], []) mock_deckhand.deckhand_render.return_value = ([], [])
result = self.runner.invoke(cli.repo, result = self.runner.invoke(
lint_command + exclude_lint_command) cli.repo, lint_command + exclude_lint_command)
assert result.exit_code == 0, result.output assert result.exit_code == 0, result.output
# A successful result (while setting lint checks to exclude) should # A successful result (while setting lint checks to exclude) should
@ -506,26 +511,26 @@ class TestSiteSecretsActions(BaseCLIActionTest):
result = yaml.safe_load_all(f) # Validate valid YAML. result = yaml.safe_load_all(f) # Validate valid YAML.
assert list(result), "%s file is empty" % generated_file assert list(result), "%s file is empty" % generated_file
@pytest.mark.skipif(not pki_utility.PKIUtility.cfssl_exists(), @pytest.mark.skipif(
reason='cfssl must be installed to execute these tests' not pki_utility.PKIUtility.cfssl_exists(),
) reason='cfssl must be installed to execute these tests')
def test_site_secrets_generate_pki_using_remote_repo_url(self): def test_site_secrets_generate_pki_using_remote_repo_url(self):
"""Validates ``generate-pki`` action using remote repo URL.""" """Validates ``generate-pki`` action using remote repo URL."""
# Scenario: # Scenario:
# #
# 1) Generate PKI using remote repo URL # 1) Generate PKI using remote repo URL
repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name, repo_url = 'https://opendev.org/airship/%s@%s' % (
self.repo_rev) self.repo_name, self.repo_rev)
secrets_opts = ['secrets', 'generate-pki', self.site_name] secrets_opts = ['secrets', 'generate-pki', self.site_name]
result = self.runner.invoke(cli.site, ['-r', repo_url] + secrets_opts) result = self.runner.invoke(cli.site, ['-r', repo_url] + secrets_opts)
self._validate_generate_pki_action(result) self._validate_generate_pki_action(result)
@pytest.mark.skipif(not pki_utility.PKIUtility.cfssl_exists(), @pytest.mark.skipif(
reason='cfssl must be installed to execute these tests' not pki_utility.PKIUtility.cfssl_exists(),
) reason='cfssl must be installed to execute these tests')
def test_site_secrets_generate_pki_using_local_repo_path(self): def test_site_secrets_generate_pki_using_local_repo_path(self):
"""Validates ``generate-pki`` action using local repo path.""" """Validates ``generate-pki`` action using local repo path."""
# Scenario: # Scenario:
@@ -538,9 +543,9 @@ class TestSiteSecretsActions(BaseCLIActionTest):
         result = self.runner.invoke(cli.site, ['-r', repo_path] + secrets_opts)
         self._validate_generate_pki_action(result)

-    @pytest.mark.skipif(not pki_utility.PKIUtility.cfssl_exists(),
-                        reason='cfssl must be installed to execute these tests'
-                        )
+    @pytest.mark.skipif(
+        not pki_utility.PKIUtility.cfssl_exists(),
+        reason='cfssl must be installed to execute these tests')
     @mock.patch.dict(
         os.environ, {
             "PEGLEG_PASSPHRASE": "123456789012345678901234567890",
@@ -553,8 +558,9 @@ class TestSiteSecretsActions(BaseCLIActionTest):
         # 1) Encrypt a file in a local repo

         repo_path = self.treasuremap_path
-        file_path = os.path.join(repo_path, "site", "airship-seaworthy",
-                                 "secrets", "passphrases", "ceph_fsid.yaml")
+        file_path = os.path.join(
+            repo_path, "site", "airship-seaworthy", "secrets", "passphrases",
+            "ceph_fsid.yaml")
         with open(file_path, "r") as ceph_fsid_fi:
             ceph_fsid = yaml.safe_load(ceph_fsid_fi)
             ceph_fsid["metadata"]["storagePolicy"] = "encrypted"
@@ -582,9 +588,9 @@ class TestSiteSecretsActions(BaseCLIActionTest):
         result = self.runner.invoke(cli.site, ['-r', repo_path] + secrets_opts)
         assert result.exit_code == 0, result.output

-    @pytest.mark.skipif(not pki_utility.PKIUtility.cfssl_exists(),
-                        reason='cfssl must be installed to execute these tests'
-                        )
+    @pytest.mark.skipif(
+        not pki_utility.PKIUtility.cfssl_exists(),
+        reason='cfssl must be installed to execute these tests')
     def test_check_pki_certs(self):
         repo_path = self.treasuremap_path
         secrets_opts = ['secrets', 'check-pki-certs', self.site_name]
@@ -603,8 +609,8 @@ class TestSiteSecretsActions(BaseCLIActionTest):
         # 1) Encrypt a file in a local repo

         repo_path = self.treasuremap_path
-        file_dir = os.path.join(repo_path, "site", "airship-seaworthy",
-                                "secrets", "certificates")
+        file_dir = os.path.join(
+            repo_path, "site", "airship-seaworthy", "secrets", "certificates")
         file_path = os.path.join(file_dir, "test.crt")
         output_path = os.path.join(file_dir, "test.yaml")
@@ -671,8 +677,8 @@ class TestTypeCliActions(BaseCLIActionTest):
         #
         # 1) List types (should clone repo automatically)
-        repo_url = 'https://opendev.org/airship/%s@%s' % (self.repo_name,
-                                                          self.repo_rev)
+        repo_url = 'https://opendev.org/airship/%s@%s' % (
+            self.repo_name, self.repo_rev)
         self._validate_type_list_action(repo_url, temp_path)

     def test_list_types_using_local_repo_path(self, temp_path):

View File

@@ -22,12 +22,10 @@ import requests
 import uuid

 _PROXY_SERVERS = {
-    'http':
-        os.getenv('HTTP_PROXY', os.getenv('http_proxy',
-                                          'http://proxy.example.com')),
-    'https':
-        os.getenv('HTTPS_PROXY',
-                  os.getenv('https_proxy', 'https://proxy.example.com'))
+    'http': os.getenv(
+        'HTTP_PROXY', os.getenv('http_proxy', 'http://proxy.example.com')),
+    'https': os.getenv(
+        'HTTPS_PROXY', os.getenv('https_proxy', 'https://proxy.example.com'))
 }
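For context, a scheme-to-proxy mapping shaped like _PROXY_SERVERS is typically handed to requests through its proxies parameter. The snippet below is only an illustrative sketch: the target URL and timeout are invented, and this hunk does not show where the dict is actually consumed.

    import os

    import requests

    # Same shape as the dict above: scheme -> proxy URL, overridable via the
    # usual environment variables.
    proxies = {
        'http': os.getenv('HTTP_PROXY', 'http://proxy.example.com'),
        'https': os.getenv('HTTPS_PROXY', 'https://proxy.example.com'),
    }

    # Hypothetical request routed through the configured proxies.
    response = requests.get(
        'https://opendev.org/airship/pegleg', proxies=proxies, timeout=30)
    response.raise_for_status()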

View File

@@ -36,6 +36,7 @@ commands =
     bash -c "{toxinidir}/tools/gate/whitespace-linter.sh"
     bandit -r pegleg -n 5
     flake8 {toxinidir}/pegleg
+    yapf -dr {toxinidir}/pegleg {toxinidir}/tests
 whitelist_externals =
     bash
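Note on the new gate line: yapf's -d flag prints a diff of the reformatting it would apply rather than modifying files, and -r recurses into the listed directories, so the pep8 environment now fails whenever pegleg or tests drifts from the configured style. Running the same yapf -dr command locally from the repository root reproduces the gate's output before pushing.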
@@ -98,6 +99,10 @@ enable-extensions = H106,H201,H904
 # [H403] multi line docstrings should end on a new line
 # [H404] multi line docstring should start without a leading new line
 # [H405] multi line docstring summary not separated with an empty line
+# [W503] line break before binary operator
 ignore = H403,H404,H405,W503
 exclude=.venv,.git,.tox,build,dist,*lib/python*,*egg,tools,*.ini,*.po,*.pot
 max-complexity = 24
+application-import-names = pegleg
+application-package-names = deckhand,promenade,shipyard
+import-order-style = pep8
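To make the newly documented W503 concrete: pycodestyle raises it whenever a line breaks before a binary operator, which is the layout the YAPF configuration favors here, so the code keeps suppressing it. A minimal sketch with invented variable names:

    # Would trigger W503 if the check were enabled: the break lands before 'and'.
    is_ready = (site_repository is not None
                and clone_path is not None)

The three flake8-import-order settings added at the bottom mark pegleg as the application's own package, treat deckhand, promenade, and shipyard as companion Airship packages, and apply pep8-style import ordering; an import block consistent with that would look roughly like:

    import os

    import click

    from pegleg import config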