Remove password reflection from logging messages
Implement a custom Logger class that sanitizes dict(), list() and fuel_upgrade.config.Config() instances. Add a sanitize() function which substitutes sensitive config data with a mask based on keywords. Refactor all logger calls to use the sanitizer. Closes-Bug: 1387164 Change-Id: I810610aed59f7a6a904b1bcfe4d89b674aba96fc
This commit is contained in:
parent
ffb507b822
commit
109abbbdaf
@ -15,7 +15,6 @@
|
||||
# under the License.
|
||||
|
||||
import abc
|
||||
import logging
|
||||
import os
|
||||
import six
|
||||
|
||||
@ -26,9 +25,6 @@ from fuel_upgrade.utils import symlink
|
||||
from fuel_upgrade.utils import symlink_if_src_exists
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class Action(object):
|
||||
"""An action interface.
|
||||
|
@ -64,7 +64,7 @@ class CheckNoRunningTasks(BaseBeforeUpgradeChecker):
|
||||
raise errors.NailgunIsNotRunningError(
|
||||
'Cannot connect to rest api service')
|
||||
|
||||
logger.debug('Nailgun tasks {0}'.format(tasks))
|
||||
logger.debug('Nailgun tasks %s', tasks)
|
||||
|
||||
running_tasks = filter(
|
||||
lambda t: t['status'] == 'running', tasks)
|
||||
@ -124,17 +124,17 @@ class CheckFreeSpace(BaseBeforeUpgradeChecker):
|
||||
logger.info('Check if devices have enough free space')
|
||||
logger.debug(
|
||||
'Required spaces from upgrade '
|
||||
'engines {0}'.format(self.required_spaces))
|
||||
'engines %s', self.required_spaces)
|
||||
|
||||
mount_points = self.space_required_for_mount_points()
|
||||
logger.debug(
|
||||
'Mount points and sum of required spaces '
|
||||
'{0}'.format(mount_points))
|
||||
'%s', mount_points)
|
||||
|
||||
error_mount_point = self.list_of_error_mount_points(mount_points)
|
||||
logger.debug(
|
||||
"Mount points which don't have "
|
||||
"enough free space {0}".format(error_mount_point))
|
||||
"enough free space %s", error_mount_point)
|
||||
|
||||
self.check_result(error_mount_point)
|
||||
|
||||
|
@ -61,7 +61,7 @@ UNCOMPATIBLE_SYSTEMS = (
|
||||
|
||||
|
||||
def handle_exception(exc):
|
||||
logger.exception(exc)
|
||||
logger.exception('%s', exc)
|
||||
|
||||
print(messages.header)
|
||||
|
||||
@ -148,7 +148,7 @@ def run_upgrade(args):
|
||||
|
||||
# Initialize config
|
||||
config = build_config(args.src, args.password)
|
||||
logger.debug('Configuration data: {0}'.format(config))
|
||||
logger.debug('Configuration data: %s', config)
|
||||
|
||||
# Initialize upgrade engines
|
||||
upgraders_to_use = [
|
||||
|
@ -76,6 +76,6 @@ class KeystoneClient(object):
|
||||
return (isinstance(resp, dict) and
|
||||
resp.get('access', {}).get('token', {}).get('id'))
|
||||
except (ValueError, requests.exceptions.RequestException) as exc:
|
||||
logger.debug('Cannot authenticate in keystone: {0}'.format(exc))
|
||||
logger.debug('Cannot authenticate in keystone: %s', exc)
|
||||
|
||||
return None
|
||||
|
@ -15,12 +15,9 @@
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
||||
from fuel_upgrade.clients import KeystoneClient
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class NailgunClient(object):
|
||||
"""NailgunClient is a simple wrapper around Nailgun API.
|
||||
|
@ -14,12 +14,8 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
|
||||
from fuel_upgrade.clients import KeystoneClient
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class OSTFClient(object):
|
||||
"""OSTFClient is a simple wrapper around OSTF API.
|
||||
|
@ -134,15 +134,14 @@ class SupervisorClient(object):
|
||||
self.get_all_processes_safely,
|
||||
timeout=self.config.supervisor['restart_timeout'])
|
||||
|
||||
logger.debug(u'List of supervisor processes {0}'.format(
|
||||
all_processes))
|
||||
logger.debug(u'List of supervisor processes %s', all_processes)
|
||||
|
||||
def start(self, service_name):
|
||||
"""Start the process under supervisor
|
||||
|
||||
:param str service_name: name of supervisor's process
|
||||
"""
|
||||
logger.debug(u'Start supervisor process {0}'.format(service_name))
|
||||
logger.debug(u'Start supervisor process %s', service_name)
|
||||
self.supervisor.startProcess(service_name)
|
||||
|
||||
def get_all_processes_safely(self):
|
||||
@ -168,7 +167,7 @@ class SupervisorClient(object):
|
||||
`autostart` - run the service on supervisor start
|
||||
"""
|
||||
logger.info(
|
||||
u'Generate supervisor configs for services {0}'.format(services))
|
||||
u'Generate supervisor configs for services %s', services)
|
||||
|
||||
for service in services:
|
||||
self.generate_config(
|
||||
|
@ -96,8 +96,8 @@ def from_fuel_version(current_version_path, from_version_path):
|
||||
# for some reasons interrupted
|
||||
if exists(from_version_path):
|
||||
from_version = get_version_from_config(from_version_path)
|
||||
logger.debug('Retrieve version from {0}, '
|
||||
'version is {1}'.format(from_version_path, from_version))
|
||||
logger.debug('Retrieve version from %s, '
|
||||
'version is %s', from_version_path, from_version)
|
||||
return from_version
|
||||
|
||||
return get_version_from_config(current_version_path)
|
||||
|
@ -206,7 +206,7 @@ class DockerUpgrader(UpgradeEngine):
|
||||
errors.CannotFindContainerError) as exc:
|
||||
utils.remove_if_exists(pg_dump_tmp_path)
|
||||
if not utils.file_exists(pg_dump_path):
|
||||
logger.debug('Failed to make database dump {0}'.format(exc))
|
||||
logger.debug('Failed to make database dump %s', exc)
|
||||
return False
|
||||
|
||||
logger.debug(
|
||||
@ -285,12 +285,11 @@ class DockerUpgrader(UpgradeEngine):
|
||||
logger.info(u'Start image uploading')
|
||||
|
||||
for image in self.new_release_images:
|
||||
logger.debug(u'Try to upload docker image {0}'.format(image))
|
||||
logger.debug(u'Try to upload docker image %s', image)
|
||||
|
||||
docker_image = image['docker_image']
|
||||
if not os.path.exists(docker_image):
|
||||
logger.warn(u'Cannot find docker image "{0}"'.format(
|
||||
docker_image))
|
||||
logger.warn(u'Cannot find docker image "%s"', docker_image)
|
||||
continue
|
||||
# NOTE(eli): docker-py binding
|
||||
# doesn't have equal call for
|
||||
@ -303,14 +302,13 @@ class DockerUpgrader(UpgradeEngine):
|
||||
"""
|
||||
logger.info(u'Started containers creation')
|
||||
graph = self.build_dependencies_graph(self.new_release_containers)
|
||||
logger.debug(u'Built dependencies graph {0}'.format(graph))
|
||||
logger.debug(u'Built dependencies graph %s', graph)
|
||||
containers_to_creation = utils.topological_sorting(graph)
|
||||
logger.debug(u'Resolved creation order {0}'.format(
|
||||
containers_to_creation))
|
||||
logger.debug(u'Resolved creation order %s', containers_to_creation)
|
||||
|
||||
for container_id in containers_to_creation:
|
||||
container = self.container_by_id(container_id)
|
||||
logger.debug(u'Start container {0}'.format(container))
|
||||
logger.debug(u'Start container %s', container)
|
||||
|
||||
links = self.get_container_links(container)
|
||||
|
||||
@ -481,7 +479,7 @@ class DockerUpgrader(UpgradeEngine):
|
||||
self.remove_new_release_images()
|
||||
|
||||
for image in self.new_release_images:
|
||||
logger.info(u'Start image building: {0}'.format(image))
|
||||
logger.info(u'Start image building: %s', image)
|
||||
self.docker_client.build(
|
||||
path=image['docker_file'],
|
||||
tag=image['name'],
|
||||
@ -535,7 +533,7 @@ class DockerUpgrader(UpgradeEngine):
|
||||
containers)
|
||||
|
||||
for container in containers_to_stop:
|
||||
logger.debug(u'Stop container: {0}'.format(container))
|
||||
logger.debug(u'Stop container: %s', container)
|
||||
|
||||
self.stop_container(container['Id'])
|
||||
|
||||
@ -604,7 +602,7 @@ class DockerUpgrader(UpgradeEngine):
|
||||
|
||||
:param container_id: container id
|
||||
"""
|
||||
logger.debug(u'Stop container: {0}'.format(container_id))
|
||||
logger.debug(u'Stop container: %s', container_id)
|
||||
|
||||
try:
|
||||
self.docker_client.stop(
|
||||
@ -617,7 +615,7 @@ class DockerUpgrader(UpgradeEngine):
|
||||
# container was stopped.
|
||||
logger.warn(
|
||||
u'Couldn\'t stop ctonainer, try '
|
||||
'to stop it again: {0}'.format(container_id))
|
||||
'to stop it again: %s', container_id)
|
||||
self.docker_client.stop(
|
||||
container_id, self.config.docker['stop_container_timeout'])
|
||||
|
||||
@ -627,8 +625,7 @@ class DockerUpgrader(UpgradeEngine):
|
||||
:param container: container name
|
||||
:param params: dict of arguments for container starting
|
||||
"""
|
||||
logger.debug(u'Start container "{0}": {1}'.format(
|
||||
container['Id'], params))
|
||||
logger.debug(u'Start container "%s": %s', container['Id'], params)
|
||||
self.docker_client.start(container['Id'], **params)
|
||||
|
||||
def create_container(self, image_name, **params):
|
||||
@ -648,8 +645,8 @@ class DockerUpgrader(UpgradeEngine):
|
||||
new_params = deepcopy(params)
|
||||
new_params['ports'] = self.get_ports(new_params)
|
||||
|
||||
logger.debug(u'Create container from image {0}: {1}'.format(
|
||||
image_name, new_params))
|
||||
logger.debug(u'Create container from image %s: %s',
|
||||
image_name, new_params)
|
||||
|
||||
def func_create():
|
||||
return self.docker_client.create_container(
|
||||
@ -790,8 +787,7 @@ class DockerUpgrader(UpgradeEngine):
|
||||
for image in image_names:
|
||||
self._delete_containers_for_image(image)
|
||||
if self.docker_client.images(name=image):
|
||||
logger.info(u'Remove image for new version {0}'.format(
|
||||
image))
|
||||
logger.info(u'Remove image for new version %s', image)
|
||||
self.docker_client.remove_image(image)
|
||||
|
||||
def _delete_container_if_exist(self, container_name):
|
||||
@ -803,7 +799,7 @@ class DockerUpgrader(UpgradeEngine):
|
||||
|
||||
for container in found_containers:
|
||||
self.stop_container(container['Id'])
|
||||
logger.debug(u'Delete container {0}'.format(container))
|
||||
logger.debug(u'Delete container %s', container)
|
||||
|
||||
# TODO(eli): refactor it and make retries
|
||||
# as a decorator
|
||||
@ -836,11 +832,11 @@ class DockerUpgrader(UpgradeEngine):
|
||||
all_containers)
|
||||
|
||||
for container in containers:
|
||||
logger.debug(u'Try to stop container {0} which '
|
||||
'depends on image {1}'.format(container['Id'], image))
|
||||
logger.debug(u'Try to stop container %s which '
|
||||
'depends on image %s', container['Id'], image)
|
||||
self.docker_client.stop(container['Id'])
|
||||
logger.debug(u'Delete container {0} which '
|
||||
'depends on image {1}'.format(container['Id'], image))
|
||||
logger.debug(u'Delete container %s which '
|
||||
'depends on image %s', container['Id'], image)
|
||||
self.docker_client.remove_container(container['Id'])
|
||||
|
||||
|
||||
|
@ -14,14 +14,11 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from fuel_upgrade.engines.base import UpgradeEngine
|
||||
from fuel_upgrade import utils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HostSystemUpgrader(UpgradeEngine):
|
||||
"""Upgrader for master node host system.
|
||||
|
@ -179,7 +179,7 @@ class OpenStackUpgrader(UpgradeEngine):
|
||||
except (
|
||||
requests.exceptions.HTTPError
|
||||
) as exc:
|
||||
logger.exception(six.text_type(exc))
|
||||
logger.exception('%s', six.text_type(exc))
|
||||
|
||||
for notif_id in reversed(self._rollback_ids['notification']):
|
||||
try:
|
||||
@ -188,7 +188,7 @@ class OpenStackUpgrader(UpgradeEngine):
|
||||
except (
|
||||
requests.exceptions.HTTPError
|
||||
) as exc:
|
||||
logger.exception(six.text_type(exc))
|
||||
logger.exception('%s', six.text_type(exc))
|
||||
|
||||
def _reset_state(self):
|
||||
"""Remove rollback IDs from the arrays.
|
||||
|
@ -17,8 +17,49 @@
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from fuel_upgrade.config import Config
|
||||
from fuel_upgrade.utils import sanitize
|
||||
|
||||
|
||||
class SanitizingLogger(logging.Logger):
    """Logger subclass that masks secrets in positional log arguments.

    Every positional argument is passed through ``sanitize()`` before the
    log record is built: dicts (and lists containing dicts) are searched
    recursively, and any value whose key contains one of the keywords in
    `SanitizingLogger.keywords` is replaced with a mask.
    ``fuel_upgrade.config.Config`` instances are unwrapped to their
    underlying dict first.  Arguments of other types pass through
    unchanged.

    Example:

    >>> auth_params = {'password': 'secure_password'}
    >>> auth_info = [{'admin_token': 'secure_token'}]
    >>> logging.setLoggerClass(SanitizingLogger)
    >>> logger = logging.getLogger()
    >>> logger.info("%s %s %s %s", 'Auth', 'password:', auth_params, auth_info)
    Auth password: {'password': '******'} [{'admin_token': '******'}]
    """

    # Substrings looked up in dict keys; matching values get masked.
    keywords = ('password', 'token')

    def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=None,
                   extra=None):
        # Unwrap Config objects to their raw dict so sanitize() can
        # traverse them, then mask any keyword-matching values.
        cleaned = tuple(
            sanitize(arg._config if isinstance(arg, Config) else arg,
                     self.keywords)
            for arg in args
        )
        return logging.Logger.makeRecord(self, name, level, fn, lno, msg,
                                         cleaned, exc_info, func, extra)
|
||||
|
||||
|
||||
def configure_logger(path):
|
||||
logging.setLoggerClass(SanitizingLogger)
|
||||
logger = logging.getLogger('fuel_upgrade')
|
||||
logger.setLevel(logging.DEBUG)
|
||||
formatter = logging.Formatter(
|
||||
|
@ -14,6 +14,7 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from copy import deepcopy
|
||||
import StringIO
|
||||
import subprocess
|
||||
import urllib2
|
||||
@ -28,6 +29,7 @@ from fuel_upgrade.utils import create_dir_if_not_exists
|
||||
from fuel_upgrade.utils import exec_cmd
|
||||
from fuel_upgrade.utils import exec_cmd_iterator
|
||||
from fuel_upgrade.utils import get_request
|
||||
from fuel_upgrade.utils import sanitize
|
||||
from fuel_upgrade.utils import topological_sorting
|
||||
from fuel_upgrade.utils import wait_for_true
|
||||
|
||||
@ -634,3 +636,82 @@ class TestVersionedFile(BaseTestCase):
|
||||
self.assertEqual(
|
||||
self.versioned_file.sorted_files(),
|
||||
['/tmp/path.ext.10', '/tmp/path.ext.6'])
|
||||
|
||||
|
||||
class TestSanitizer(BaseTestCase):
    """Tests for fuel_upgrade.utils.sanitize()."""

    # Fixture with secrets at several nesting levels: top-level dict,
    # nested dict, dicts inside a list, and a doubly-nested dict.
    original = {
        'admin_password': 'r00tme',
        'not_a_pass': 1,
        'nested': {
            'password_here': 'JuYReDm4',
            'nested_again': [
                # Bare strings in a list are NOT masked — sanitize()
                # only masks dict values whose key matches a keyword.
                'apassword!',
                'jcqOyKEf',
                {'login': 'root', 'a_password': '8xMflcaD', 'x': 55.2},
                {
                    'and_again': {
                        # Keyword matching is case-sensitive, so this
                        # value is expected to remain unmasked.
                        'UPPERCASE_PASSWORD': 'VpE8gqKN',
                        'password_as_list': ['it', 'will', 'be', 'changed'],
                        'admin_token': 'Ab8ph9qO'
                    }
                }
            ]
        }
    }

    # `original` after sanitize() with the default '******' mask.
    expected = {
        'admin_password': '******',
        'not_a_pass': 1,
        'nested': {
            'password_here': '******',
            'nested_again': [
                'apassword!',
                'jcqOyKEf',
                {'login': 'root', 'a_password': '******', 'x': 55.2},
                {
                    'and_again': {
                        'UPPERCASE_PASSWORD': 'VpE8gqKN',
                        # The whole list value is replaced by the mask.
                        'password_as_list': '******',
                        'admin_token': '******'
                    }
                }
            ]
        }
    }

    # Same as `expected` but produced with a caller-supplied mask.
    expected_custom_mask = {
        'admin_password': 'XXX',
        'not_a_pass': 1,
        'nested': {
            'password_here': 'XXX',
            'nested_again': [
                'apassword!',
                'jcqOyKEf',
                {'login': 'root', 'a_password': 'XXX', 'x': 55.2},
                {
                    'and_again': {
                        'UPPERCASE_PASSWORD': 'VpE8gqKN',
                        'password_as_list': 'XXX',
                        'admin_token': 'XXX'
                    }
                }
            ]
        }
    }

    def test_hide_data(self):
        # Default mask masks every keyword-matching value.
        self.assertEqual(
            sanitize(self.original, ['password', 'token']),
            self.expected
        )

    def test_original_object_unchanged(self):
        # sanitize() must operate on a deep copy, never mutate its input.
        copy_conf = deepcopy(self.original)
        sanitize(self.original, ['password', 'token'])
        self.assertEqual(self.original, copy_conf)

    def test_custom_mask(self):
        # The mask string is configurable via the `mask` keyword argument.
        self.assertEqual(
            sanitize(self.original, ['password', 'token'], mask='XXX'),
            self.expected_custom_mask
        )
||||
|
@ -44,13 +44,13 @@ def exec_cmd(cmd):
|
||||
|
||||
:param cmd: shell command
|
||||
"""
|
||||
logger.debug(u'Execute command "{0}"'.format(cmd))
|
||||
logger.debug(u'Execute command "%s"', cmd)
|
||||
child = subprocess.Popen(
|
||||
cmd, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
shell=True)
|
||||
|
||||
logger.debug(u'Stdout and stderr of command "{0}":'.format(cmd))
|
||||
logger.debug(u'Stdout and stderr of command "%s":', cmd)
|
||||
for line in child.stdout:
|
||||
logger.debug(line.rstrip())
|
||||
|
||||
@ -78,13 +78,13 @@ def exec_cmd_iterator(cmd):
|
||||
:returns: generator where yeach item
|
||||
is line from stdout
|
||||
"""
|
||||
logger.debug(u'Execute command "{0}"'.format(cmd))
|
||||
logger.debug(u'Execute command "%s"', cmd)
|
||||
child = subprocess.Popen(
|
||||
cmd, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
shell=True)
|
||||
|
||||
logger.debug(u'Stdout and stderr of command "{0}":'.format(cmd))
|
||||
logger.debug(u'Stdout and stderr of command "%s":', cmd)
|
||||
for line in child.stdout:
|
||||
logger.debug(line.rstrip())
|
||||
yield line
|
||||
@ -107,7 +107,7 @@ def _wait_and_check_exit_code(cmd, child):
|
||||
u'Shell command executed with "{0}" '
|
||||
'exit code: {1} '.format(exit_code, cmd))
|
||||
|
||||
logger.debug(u'Command "{0}" successfully executed'.format(cmd))
|
||||
logger.debug(u'Command "%s" successfully executed', cmd)
|
||||
|
||||
|
||||
def get_request(url):
|
||||
@ -116,12 +116,12 @@ def get_request(url):
|
||||
:param url: url
|
||||
:returns list|dict: deserialized response
|
||||
"""
|
||||
logger.debug('GET request to {0}'.format(url))
|
||||
logger.debug('GET request to %s', url)
|
||||
response = urllib2.urlopen(url)
|
||||
response_data = response.read()
|
||||
response_code = response.getcode()
|
||||
logger.debug('GET response from {0}, code {1}, data: {2}'.format(
|
||||
url, response_code, response_data))
|
||||
logger.debug('GET response from %s, code %d, data: %s',
|
||||
url, response_code, response_data)
|
||||
|
||||
return json.loads(response_data), response_code
|
||||
|
||||
@ -171,8 +171,8 @@ def render_template_to_file(src, dst, params):
|
||||
:param src: path to template
|
||||
:param dst: path where rendered template will be saved
|
||||
"""
|
||||
logger.debug('Render template from {0} to {1} with params: {2}'.format(
|
||||
src, dst, params))
|
||||
logger.debug('Render template from %s to %s with params: %s',
|
||||
src, dst, params)
|
||||
with open(src, 'r') as f:
|
||||
template_cfg = f.read()
|
||||
|
||||
@ -259,7 +259,7 @@ def remove_if_exists(path):
|
||||
:param path: path to file for removal
|
||||
"""
|
||||
if os.path.exists(path):
|
||||
logger.debug(u'Remove file "{0}"'.format(path))
|
||||
logger.debug(u'Remove file "%s"', path)
|
||||
os.remove(path)
|
||||
|
||||
|
||||
@ -273,8 +273,7 @@ def file_contains_lines(file_path, patterns):
|
||||
False if file doesn't match one or more patterns
|
||||
"""
|
||||
logger.debug(
|
||||
u'Check if file "{0}" matches to pattern "{1}"'.format(
|
||||
file_path, patterns))
|
||||
u'Check if file "%s" matches to pattern "%s"', file_path, patterns)
|
||||
|
||||
regexps = [re.compile(pattern) for pattern in patterns]
|
||||
|
||||
@ -286,8 +285,7 @@ def file_contains_lines(file_path, patterns):
|
||||
del regexps[i]
|
||||
|
||||
if regexps:
|
||||
logger.warn('Cannot find lines {0} in file {1}'.format(
|
||||
regexps, file_path))
|
||||
logger.warn('Cannot find lines %s in file %s', regexps, file_path)
|
||||
return False
|
||||
|
||||
return True
|
||||
@ -301,8 +299,8 @@ def copy_if_does_not_exist(from_path, to_path):
|
||||
"""
|
||||
if os.path.exists(to_path):
|
||||
logger.debug(
|
||||
'Skip file copying, because file {0} '
|
||||
'already exists'.format(to_path))
|
||||
'Skip file copying, because file %s '
|
||||
'already exists', to_path)
|
||||
return
|
||||
|
||||
copy(from_path, to_path)
|
||||
@ -316,8 +314,8 @@ def copy_if_exists(from_path, to_path):
|
||||
"""
|
||||
if not os.path.exists(from_path):
|
||||
logger.debug(
|
||||
'Skip file copying, because file {0} '
|
||||
'does not exist'.format(from_path))
|
||||
'Skip file copying, because file %s '
|
||||
'does not exist', from_path)
|
||||
return
|
||||
|
||||
copy(from_path, to_path)
|
||||
@ -737,6 +735,33 @@ def get_base_release(release, depends_on, existing_releases):
|
||||
return None
|
||||
|
||||
|
||||
def sanitize(obj, keywords, mask='******'):
    """Return a deep copy of obj with private data hidden.

    Recursively walks dicts (including dicts nested inside lists, sets
    and tuples); any dict value whose key contains one of the keywords
    (case-sensitive substring match) is replaced with `mask`.  Objects
    of other types are returned unchanged.  The input object itself is
    never mutated.

    :param obj: object to be sanitized
    :param keywords: iterable of keywords to be found in dict keys
    :param mask: a string for substitution of sanitized values
    :return: sanitized deep copy of obj
    """
    def _helper(_obj):
        if isinstance(_obj, dict):
            for option in _obj:
                # Substring match: e.g. 'admin_password' matches 'password'.
                if any(key in option for key in keywords):
                    _obj[option] = mask
                else:
                    _helper(_obj[option])

        elif isinstance(_obj, (list, set, tuple)):
            for value in _obj:
                _helper(value)

        return _obj

    # Making sure the original object remains untouched
    return _helper(deepcopy(obj))
|
||||
|
||||
|
||||
class VersionedFile(object):
|
||||
"""Set of methods for versioned files.
|
||||
If `basename` is '/tmp/file.ext' it allows
|
||||
|
Loading…
Reference in New Issue
Block a user