Set a fixed order in which data is dumped to YAML files
One of the well-known issues of Python is that dictionaries do not maintain the order of their keys once created. This causes YAML data dumps to output in a seemingly random order or alphabetically. As these output files are often kept in their own repositories, they must go through review or comparison in VCS. If the order of keys changes in these files every time Pegleg is run, it makes it difficult for a user to compare newly generated files with the old ones. To fix this issue, we can change all dictionaries used to template YAML files into OrderedDict objects. The OrderedDict objects will maintain their order through dumping to YAML. Change-Id: I0c1ee3f3f37ed8598d2ba81528d5c61447cbd0d0
This commit is contained in:
parent
174e356214
commit
eb6c2574bc
@ -12,6 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from collections import OrderedDict
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
@ -315,23 +316,24 @@ class PKIUtility(object):
|
||||
"""
|
||||
|
||||
wrapped_schema = 'deckhand/%s/v1' % kind
|
||||
wrapped_metadata = {
|
||||
'schema': 'metadata/Document/v1',
|
||||
'name': name,
|
||||
'layeringDefinition': {
|
||||
'abstract': False,
|
||||
'layer': 'site',
|
||||
},
|
||||
'storagePolicy': 'cleartext'
|
||||
}
|
||||
wrapped_metadata = OrderedDict(
|
||||
[
|
||||
('schema', 'metadata/Document/v1'), ('name', name),
|
||||
(
|
||||
'layeringDefinition',
|
||||
OrderedDict([
|
||||
('abstract', False),
|
||||
('layer', 'site'),
|
||||
])), ('storagePolicy', 'cleartext')
|
||||
])
|
||||
wrapped_data = PKIUtility._block_literal(
|
||||
data, block_strings=block_strings)
|
||||
|
||||
document = {
|
||||
"schema": wrapped_schema,
|
||||
"metadata": wrapped_metadata,
|
||||
"data": wrapped_data
|
||||
}
|
||||
document = OrderedDict(
|
||||
[
|
||||
("schema", wrapped_schema), ("metadata", wrapped_metadata),
|
||||
("data", wrapped_data)
|
||||
])
|
||||
|
||||
return PeglegManagedSecretsDocument(document).pegleg_document
|
||||
|
||||
|
@ -13,6 +13,7 @@
|
||||
# limitations under the License.
|
||||
|
||||
from abc import ABC
|
||||
from collections import OrderedDict
|
||||
import logging
|
||||
import os
|
||||
|
||||
@ -54,19 +55,20 @@ class BaseGenerator(ABC):
|
||||
:param str storage_policy: Storage policy for the secret data
|
||||
:param str secret_data: The data to be stored in this document.
|
||||
"""
|
||||
return {
|
||||
'schema': 'deckhand/{}/v1'.format(kind),
|
||||
'metadata': {
|
||||
'schema': 'metadata/Document/v1',
|
||||
'name': name,
|
||||
'layeringDefinition': {
|
||||
'abstract': False,
|
||||
'layer': 'site',
|
||||
},
|
||||
'storagePolicy': storage_policy,
|
||||
},
|
||||
'data': secret_data,
|
||||
}
|
||||
layering_definition = OrderedDict(
|
||||
[('abstract', False), ('layer', 'site')])
|
||||
metadata = OrderedDict(
|
||||
[
|
||||
('schema', 'metadata/Document/v1'), ('name', name),
|
||||
('layeringDefinition', layering_definition),
|
||||
('storagePolicy', storage_policy)
|
||||
])
|
||||
data = OrderedDict(
|
||||
[
|
||||
('schema', 'deckhand/{}/v1'.format(kind)),
|
||||
('metadata', metadata), ('data', secret_data)
|
||||
])
|
||||
return data
|
||||
|
||||
def get_save_path(self, passphrase_name):
|
||||
"""Calculate and return the save path of the ``passphrase_name``."""
|
||||
|
@ -11,7 +11,7 @@
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from collections import OrderedDict
|
||||
from glob import glob
|
||||
import logging
|
||||
import os
|
||||
@ -193,19 +193,23 @@ def wrap_secret(
|
||||
with open(filename, 'r') as in_fi:
|
||||
data = in_fi.read()
|
||||
|
||||
inner_doc = {
|
||||
"schema": schema,
|
||||
"data": data,
|
||||
"metadata": {
|
||||
"layeringDefinition": {
|
||||
"abstract": False,
|
||||
"layer": layer
|
||||
},
|
||||
"name": name,
|
||||
"schema": "metadata/Document/v1",
|
||||
"storagePolicy": "encrypted" if encrypt else "cleartext"
|
||||
}
|
||||
}
|
||||
inner_doc = OrderedDict(
|
||||
[
|
||||
("schema", schema), ("data", data),
|
||||
(
|
||||
"metadata",
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"layeringDefinition",
|
||||
OrderedDict(
|
||||
[("abstract", False), ("layer", layer)])),
|
||||
("name", name), ("schema", "metadata/Document/v1"),
|
||||
(
|
||||
"storagePolicy",
|
||||
"encrypted" if encrypt else "cleartext")
|
||||
]))
|
||||
])
|
||||
managed_secret = PeglegManagedSecret(inner_doc, author=author)
|
||||
if encrypt:
|
||||
psm = PeglegSecretManagement(
|
||||
@ -213,7 +217,7 @@ def wrap_secret(
|
||||
output_doc = psm.get_encrypted_secrets()[0][0]
|
||||
else:
|
||||
output_doc = managed_secret.pegleg_document
|
||||
files.safe_dump(output_doc, output_path)
|
||||
files.safe_dump(output_doc, output_path, sort_keys=False)
|
||||
|
||||
|
||||
def check_cert_expiry(site_name, duration=60):
|
||||
|
@ -12,6 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from collections import OrderedDict
|
||||
import logging
|
||||
import os
|
||||
|
||||
@ -24,6 +25,7 @@ from yaml.constructor import SafeConstructor
|
||||
from pegleg import config
|
||||
from pegleg.engine import util
|
||||
from pegleg.engine.util import files
|
||||
from pegleg.engine.util.files import add_representer_ordered_dict
|
||||
|
||||
__all__ = ('collect', 'list_', 'show', 'render')
|
||||
|
||||
@ -50,6 +52,7 @@ def _collect_to_stdout(site_name):
|
||||
for line in _read_and_format_yaml(filename):
|
||||
# This code is a pattern to convert \r\n to \n.
|
||||
click.echo("\n".join(line.splitlines()))
|
||||
add_representer_ordered_dict()
|
||||
res = yaml.safe_dump(
|
||||
_get_deployment_data_doc(),
|
||||
explicit_start=True,
|
||||
@ -81,6 +84,7 @@ def _collect_to_file(site_name, save_location):
|
||||
save_files[repo_name] = open(save_file, 'w')
|
||||
LOG.debug("Collecting file %s to file %s", filename, save_file)
|
||||
save_files[repo_name].writelines(_read_and_format_yaml(filename))
|
||||
add_representer_ordered_dict()
|
||||
save_files[curr_site_repo].writelines(
|
||||
yaml.safe_dump(
|
||||
_get_deployment_data_doc(),
|
||||
@ -129,6 +133,7 @@ def render(site_name, output_stream, validate):
|
||||
explicit_start=True,
|
||||
explicit_end=True)
|
||||
else:
|
||||
add_representer_ordered_dict()
|
||||
click.echo(
|
||||
yaml.dump_all(
|
||||
rendered_documents,
|
||||
@ -185,21 +190,22 @@ def _get_deployment_data_doc():
|
||||
files.path_leaf(repo): _get_repo_deployment_data_stanza(repo)
|
||||
for repo in config.all_repos()
|
||||
}
|
||||
return {
|
||||
"schema": "pegleg/DeploymentData/v1",
|
||||
"metadata": {
|
||||
"schema": "metadata/Document/v1",
|
||||
"name": "deployment-version",
|
||||
"layeringDefinition": {
|
||||
"abstract": False,
|
||||
"layer": "global"
|
||||
},
|
||||
"storagePolicy": "cleartext",
|
||||
},
|
||||
"data": {
|
||||
"documents": stanzas
|
||||
}
|
||||
}
|
||||
return OrderedDict(
|
||||
[
|
||||
("schema", "pegleg/DeploymentData/v1"),
|
||||
(
|
||||
"metadata",
|
||||
OrderedDict(
|
||||
[
|
||||
("schema", "metadata/Document/v1"),
|
||||
("name", "deployment-version"),
|
||||
(
|
||||
"layeringDefinition",
|
||||
OrderedDict(
|
||||
[("abstract", False), ("layer", "global")])),
|
||||
("storagePolicy", "cleartext"),
|
||||
])), ("data", OrderedDict([("documents", stanzas)]))
|
||||
])
|
||||
|
||||
|
||||
def _get_repo_deployment_data_stanza(repo_path):
|
||||
|
@ -240,6 +240,7 @@ def slurp(path):
|
||||
|
||||
|
||||
def dump(data, path, flag='w', **kwargs):
|
||||
add_representer_ordered_dict()
|
||||
os.makedirs(os.path.dirname(os.path.abspath(path)), exist_ok=True)
|
||||
with open(path, flag) as f:
|
||||
|
||||
@ -247,6 +248,7 @@ def dump(data, path, flag='w', **kwargs):
|
||||
|
||||
|
||||
def safe_dump(data, path, flag='w', **kwargs):
|
||||
add_representer_ordered_dict()
|
||||
os.makedirs(os.path.dirname(os.path.abspath(path)), exist_ok=True)
|
||||
with open(path, flag) as f:
|
||||
|
||||
@ -254,6 +256,7 @@ def safe_dump(data, path, flag='w', **kwargs):
|
||||
|
||||
|
||||
def dump_all(data, path, flag='w', **kwargs):
|
||||
add_representer_ordered_dict()
|
||||
os.makedirs(os.path.dirname(os.path.abspath(path)), exist_ok=True)
|
||||
with open(path, flag) as f:
|
||||
|
||||
@ -308,7 +311,7 @@ def read(path):
|
||||
raise click.ClickException('Failed to parse %s:\n%s' % (path, e))
|
||||
|
||||
|
||||
def write(data, file_path):
|
||||
def write(data, file_path, sort_keys=False):
|
||||
"""
|
||||
Write the data to destination file_path.
|
||||
|
||||
@ -319,7 +322,10 @@ def write(data, file_path):
|
||||
:type file_path: str
|
||||
:param data: data to be written to the destination file
|
||||
:type data: str, dict, or a list of dicts
|
||||
:param sort_keys: sort keys alphabetically in output yaml
|
||||
:type sort_keys: bool
|
||||
"""
|
||||
add_representer_ordered_dict()
|
||||
try:
|
||||
os.makedirs(os.path.dirname(os.path.abspath(file_path)), exist_ok=True)
|
||||
with open(file_path, 'w') as stream:
|
||||
@ -331,6 +337,7 @@ def write(data, file_path):
|
||||
yaml.safe_dump_all(
|
||||
data,
|
||||
stream,
|
||||
sort_keys=sort_keys,
|
||||
explicit_start=True,
|
||||
explicit_end=True,
|
||||
default_flow_style=False)
|
||||
@ -343,6 +350,17 @@ def write(data, file_path):
|
||||
"Couldn't write data to {}: {}".format(file_path, e))
|
||||
|
||||
|
||||
def add_representer_ordered_dict():
|
||||
yaml.add_representer(
|
||||
collections.OrderedDict,
|
||||
lambda dumper, dict_data: dumper.represent_mapping(
|
||||
'tag:yaml.org,2002:map', dict_data.items()))
|
||||
yaml.add_representer(
|
||||
collections.OrderedDict,
|
||||
lambda dumper, dict_data: dumper.represent_mapping(
|
||||
'tag:yaml.org,2002:map', dict_data.items()), yaml.SafeDumper)
|
||||
|
||||
|
||||
def _recurse_subdirs(search_path, depth):
|
||||
directories = set()
|
||||
try:
|
||||
|
@ -11,7 +11,7 @@
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from collections import OrderedDict
|
||||
from datetime import datetime
|
||||
import logging
|
||||
|
||||
@ -71,28 +71,36 @@ class PeglegManagedSecretsDocument(object):
|
||||
document.
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
doc = {
|
||||
'schema': PEGLEG_MANAGED_SCHEMA,
|
||||
'metadata': {
|
||||
'name': secrets_document['metadata']['name'],
|
||||
'schema': 'metadata/Document/v1',
|
||||
'labels': secrets_document['metadata'].get('labels', {}),
|
||||
'layeringDefinition': {
|
||||
'abstract': False,
|
||||
layering_definition = OrderedDict(
|
||||
[
|
||||
('abstract', False),
|
||||
# The current requirement only requires site layer.
|
||||
'layer': 'site',
|
||||
},
|
||||
'storagePolicy': 'cleartext'
|
||||
},
|
||||
'data': {
|
||||
'managedDocument': {
|
||||
'schema': secrets_document['schema'],
|
||||
'metadata': secrets_document['metadata'],
|
||||
'data': secrets_document['data']
|
||||
}
|
||||
}
|
||||
}
|
||||
('layer', 'site')
|
||||
])
|
||||
metadata = OrderedDict(
|
||||
[
|
||||
('name', secrets_document['metadata']['name']),
|
||||
('schema', 'metadata/Document/v1'),
|
||||
('labels', secrets_document['metadata'].get('labels', {})),
|
||||
('layeringDefinition', layering_definition),
|
||||
('storagePolicy', 'cleartext')
|
||||
])
|
||||
data = OrderedDict(
|
||||
[
|
||||
(
|
||||
'managedDocument',
|
||||
OrderedDict(
|
||||
[
|
||||
('schema', secrets_document['schema']),
|
||||
('metadata', secrets_document['metadata']),
|
||||
('data', secrets_document['data'])
|
||||
]))
|
||||
])
|
||||
doc = OrderedDict(
|
||||
[
|
||||
('schema', PEGLEG_MANAGED_SCHEMA), ('metadata', metadata),
|
||||
('data', data)
|
||||
])
|
||||
|
||||
if generated:
|
||||
doc['data'][GENERATED] = {
|
||||
|
@ -21,6 +21,7 @@ from pegleg import config
|
||||
from pegleg.engine.util.encryption import decrypt
|
||||
from pegleg.engine.util.encryption import encrypt
|
||||
from pegleg.engine.util import files
|
||||
from pegleg.engine.util.files import add_representer_ordered_dict
|
||||
from pegleg.engine.util.pegleg_managed_document import \
|
||||
PeglegManagedSecretsDocument as PeglegManagedSecret
|
||||
|
||||
@ -167,11 +168,12 @@ class PeglegSecretManagement(object):
|
||||
"""Decrypt and unwrap pegleg managed encrypted secrets documents
|
||||
included in a site secrets file, and print the result to the standard
|
||||
out."""
|
||||
|
||||
add_representer_ordered_dict()
|
||||
secrets = self.get_decrypted_secrets()
|
||||
|
||||
return yaml.safe_dump_all(
|
||||
secrets,
|
||||
sort_keys=False,
|
||||
explicit_start=True,
|
||||
explicit_end=True,
|
||||
default_flow_style=False)
|
||||
|
@ -23,6 +23,7 @@ import yaml
|
||||
|
||||
from pegleg.engine import exceptions
|
||||
from pegleg.engine.util import files
|
||||
from pegleg.engine.util.files import add_representer_ordered_dict
|
||||
from pegleg.engine.util.pegleg_secret_management import PeglegSecretManagement
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
@ -83,6 +84,7 @@ class ShipyardHelper(object):
|
||||
docs=collected_documents[document])
|
||||
decrypted_documents = pegleg_secret_mgmt.get_decrypted_secrets()
|
||||
collection_data.extend(decrypted_documents)
|
||||
add_representer_ordered_dict()
|
||||
collection_as_yaml = yaml.dump_all(
|
||||
collection_data, Dumper=yaml.SafeDumper)
|
||||
|
||||
|
@ -13,6 +13,7 @@
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from collections import OrderedDict
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
@ -43,42 +44,53 @@ DATA = {
|
||||
]
|
||||
}
|
||||
|
||||
MULTI_REPO_DATA = {
|
||||
'repo1': [
|
||||
{
|
||||
'schema': 'pegleg/SiteDefinition/v1',
|
||||
'metadata': {
|
||||
'schema': 'metadata/Document/v1',
|
||||
'layeringDefinition': {
|
||||
'abstract': False,
|
||||
'layer': 'site'
|
||||
},
|
||||
'name': 'site-name',
|
||||
'storagePolicy': 'cleartext'
|
||||
},
|
||||
'data': {
|
||||
'site_type': 'foundry'
|
||||
}
|
||||
}
|
||||
],
|
||||
'repo2': [
|
||||
{
|
||||
'schema': 'pegleg/SiteDefinition/v1',
|
||||
'metadata': {
|
||||
'schema': 'metadata/Document/v1',
|
||||
'layeringDefinition': {
|
||||
'abstract': False,
|
||||
'layer': 'site'
|
||||
},
|
||||
'name': 'site-name',
|
||||
'storagePolicy': 'cleartext'
|
||||
},
|
||||
'data': {
|
||||
'site_type': 'foundry'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
MULTI_REPO_DATA = OrderedDict(
|
||||
[
|
||||
(
|
||||
'repo1', [
|
||||
OrderedDict(
|
||||
[
|
||||
('schema', 'pegleg/SiteDefinition/v1'),
|
||||
(
|
||||
'metadata',
|
||||
OrderedDict(
|
||||
[
|
||||
('schema', 'metadata/Document/v1'),
|
||||
(
|
||||
'layeringDefinition',
|
||||
OrderedDict(
|
||||
[
|
||||
('abstract', False),
|
||||
('layer', 'site')
|
||||
])), ('name', 'site-name'),
|
||||
('storagePolicy', 'cleartext')
|
||||
])),
|
||||
('data', OrderedDict([('site_type', 'foundry')]))
|
||||
])
|
||||
]),
|
||||
(
|
||||
'repo2', [
|
||||
OrderedDict(
|
||||
[
|
||||
('schema', 'pegleg/SiteDefinition/v1'),
|
||||
(
|
||||
'metadata',
|
||||
OrderedDict(
|
||||
[
|
||||
('schema', 'metadata/Document/v1'),
|
||||
(
|
||||
'layeringDefinition',
|
||||
OrderedDict(
|
||||
[
|
||||
('abstract', False),
|
||||
('layer', 'site')
|
||||
])), ('name', 'site-name'),
|
||||
('storagePolicy', 'cleartext')
|
||||
])),
|
||||
('data', OrderedDict([('site_type', 'foundry')]))
|
||||
])
|
||||
])
|
||||
])
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
|
Loading…
Reference in New Issue
Block a user