Add common code for node attributes
In order to facilitate adding different CLI commands for managing different attributes of nodes, there is a need to add common code that allows storing and loading attributes, along with code that finds the proper file path for them. Change-Id: I82e61de64f75a582f2ca49bfde08fda041586f04
This commit is contained in:
parent
175c5467d0
commit
94ba15eef4
|
@ -33,6 +33,9 @@ class NodeMixIn(object):
|
|||
'supported_hugepages',
|
||||
'distances')
|
||||
|
||||
supported_file_formats = ('json', 'yaml')
|
||||
allowed_attr_types = ('attributes', 'disks', 'interfaces')
|
||||
|
||||
@classmethod
|
||||
def get_numa_topology_info(cls, data):
|
||||
numa_topology_info = {}
|
||||
|
@ -41,6 +44,29 @@ class NodeMixIn(object):
|
|||
numa_topology_info[key] = numa_topology.get(key)
|
||||
return numa_topology_info
|
||||
|
||||
def get_attributes_path(self, attr_type, file_format, node_id, directory):
    """Return the path to the file holding attributes of a node.

    The path has the form ``<abs(directory)>/node_<node_id>/<attr_type>.<file_format>``.

    :param attr_type: Attribute type. Must be one of
                      {attributes, disks, interfaces}.
    :param file_format: The format of the file that contains or will
                        contain the attributes. Must be json or yaml.
    :param node_id: Id of a node.
    :param directory: Directory that is used to store attributes.
    :raises ValueError: if attr_type or file_format is not supported.
    """
    if attr_type not in self.allowed_attr_types:
        raise ValueError('attr_type must be '
                         'one of {}'.format(self.allowed_attr_types))

    if file_format not in self.supported_file_formats:
        raise ValueError('file_format must be '
                         'one of {}'.format(self.supported_file_formats))

    return os.path.join(os.path.abspath(directory),
                        'node_{0}'.format(node_id),
                        '{}.{}'.format(attr_type, file_format))
|
||||
|
||||
|
||||
class NodeList(NodeMixIn, base.BaseListCommand):
|
||||
"""Show list of all available nodes."""
|
||||
|
|
|
@ -12,7 +12,11 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import yaml
|
||||
|
||||
from fuelclient.cli import error
|
||||
from fuelclient import utils
|
||||
|
||||
|
||||
def get_display_data_single(fields, data):
|
||||
|
@ -38,3 +42,31 @@ def get_display_data_multi(fields, data):
|
|||
"""Performs slice of data by set of given fields for multiple objects."""
|
||||
|
||||
return [get_display_data_single(fields, elem) for elem in data]
|
||||
|
||||
|
||||
def safe_load(data_format, stream):
    """Deserialize *stream* using the given data format.

    :param data_format: Either 'json' or 'yaml'.
    :param stream: A readable file-like object with the serialized data.
    :raises ValueError: if data_format is not supported.
    """
    if data_format == 'json':
        loader = utils.safe_deserialize(json.load)
    elif data_format == 'yaml':
        loader = utils.safe_deserialize(yaml.safe_load)
    else:
        raise ValueError('Unsupported data format.')

    return loader(stream)
|
||||
|
||||
|
||||
def safe_dump(data_format, stream, data):
    """Serialize *data* to *stream* using the given data format.

    :param data_format: Either 'json' or 'yaml'.
    :param stream: A writable file-like object to dump *data* into.
    :param data: The data to serialize.
    :raises ValueError: if data_format is not supported.
    """
    # Local defs instead of lambda assignments: PEP 8 (E731) recommends
    # 'def' when binding a callable to a name.
    def _json_dump(data, stream):
        json.dump(data, stream, indent=4)

    def _yaml_dump(data, stream):
        yaml.safe_dump(data, stream, default_flow_style=False)

    dumpers = {'json': _json_dump,
               'yaml': _yaml_dump}

    if data_format not in dumpers:
        raise ValueError('Unsupported data format.')

    dumpers[data_format](data, stream)
|
||||
|
|
|
@ -18,6 +18,7 @@ import json
|
|||
import os
|
||||
import six
|
||||
import subprocess
|
||||
import yaml
|
||||
|
||||
import mock
|
||||
import requests
|
||||
|
@ -260,3 +261,71 @@ class TestUtils(base.UnitTestCase):
|
|||
self.assertRaisesRegexp(
|
||||
TypeError, 'A dict or list instance expected',
|
||||
utils.parse_to_list_of_dicts, [42])
|
||||
|
||||
def test_safe_load_json(self):
    """safe_load('json', ...) parses a JSON stream into the original data."""
    expected = {'test_key': 'test_val'}

    mocked_open = mock.mock_open(read_data=json.dumps(expected))
    with mock.patch('fuelclient.tests.unit.common.test_utils.open',
                    mocked_open):
        result = data_utils.safe_load('json', open('/a/random/file', 'r'))

    self.assertEqual(expected, result)
|
||||
|
||||
def test_safe_load_yaml(self):
    """safe_load('yaml', ...) parses a YAML stream into the original data."""
    expected = {'test_key': 'test_val'}

    mocked_open = mock.mock_open(read_data=yaml.dump(expected))
    with mock.patch('fuelclient.tests.unit.common.test_utils.open',
                    mocked_open):
        result = data_utils.safe_load('yaml', open('/a/random/file', 'r'))

    self.assertEqual(expected, result)
|
||||
|
||||
@mock.patch('json.dump')
def test_safe_dump_json(self, m_dump):
    """safe_dump('json', ...) delegates to json.dump with indent=4."""
    payload = {'test_key': 'test_val'}

    mocked_open = mock.mock_open()
    with mock.patch('fuelclient.tests.unit.common.test_utils.open',
                    mocked_open):
        out = open('/a/random/file', 'w')
        data_utils.safe_dump('json', out, payload)

    m_dump.assert_called_once_with(payload, out, indent=4)
|
||||
|
||||
@mock.patch('yaml.safe_dump')
def test_safe_dump_yaml(self, m_dump):
    """safe_dump('yaml', ...) delegates to yaml.safe_dump in block style."""
    payload = {'test_key': 'test_val'}

    mocked_open = mock.mock_open()
    with mock.patch('fuelclient.tests.unit.common.test_utils.open',
                    mocked_open):
        out = open('/a/random/file', 'w')
        data_utils.safe_dump('yaml', out, payload)

    m_dump.assert_called_once_with(payload,
                                   out,
                                   default_flow_style=False)
|
||||
|
||||
def test_safe_dump_wrong_format(self):
    """An unsupported format passed to safe_dump raises ValueError."""
    payload = {'test_key': 'test_val'}

    with mock.patch('fuelclient.tests.unit.common.test_utils.open',
                    mock.mock_open()):
        out = open('/a/random/file', 'w')
        self.assertRaises(ValueError,
                          data_utils.safe_dump,
                          'bad', out, payload)
|
||||
|
||||
def test_safe_load_wrong_format(self):
    """An unsupported format passed to safe_load raises ValueError."""
    with mock.patch('fuelclient.tests.unit.common.test_utils.open',
                    mock.mock_open()):
        stream = open('/a/random/file', 'w')
        self.assertRaises(ValueError,
                          data_utils.safe_load,
                          'bad', stream)
|
||||
|
|
|
@ -375,3 +375,44 @@ node-4 ansible_host=10.20.0.5
|
|||
|
||||
self.m_get_client.assert_called_once_with('node', mock.ANY)
|
||||
self.m_client.upload_attributes.assert_called_once_with(42, None)
|
||||
|
||||
|
||||
class TestNodeMixIn(test_engine.BaseCLITest):
    """Tests for NodeMixIn.get_attributes_path."""

    def test_get_attribute_path(self):
        """A valid attr type maps to <dir>/node_<id>/<type>.<format>."""
        mixin = cmd_node.NodeMixIn()

        for kind in ('attributes', 'interfaces', 'disks'):
            expected_path = '/test/node_42/{t}.json'.format(t=kind)
            self.assertEqual(
                expected_path,
                mixin.get_attributes_path(kind, 'json', 42, '/test'))

    def test_get_attribute_path_wrong_attr_type(self):
        """An unknown attribute type is rejected with ValueError."""
        mixin = cmd_node.NodeMixIn()

        self.assertRaises(ValueError,
                          mixin.get_attributes_path,
                          'wrong', 'json', 42, '/test')

    def test_get_attribute_path_wrong_file_format(self):
        """An unknown file format is rejected with ValueError."""
        mixin = cmd_node.NodeMixIn()

        self.assertRaises(ValueError,
                          mixin.get_attributes_path,
                          'interfaces', 'wrong', 42, '/test')
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
# of appearance. Changing the order has an impact on the overall integration
|
||||
# process, which may cause wedges in the gate later.
|
||||
cliff!=1.16.0,>=1.15.0 # Apache-2.0
|
||||
oslo.utils>=3.5.0 # Apache-2.0
|
||||
pbr>=1.6 # Apache-2.0
|
||||
python-keystoneclient!=1.8.0,!=2.1.0,>=1.6.0 # Apache-2.0
|
||||
PyYAML>=3.1.0 # MIT
|
||||
|
|
Loading…
Reference in New Issue