Plugin API update support
This change adds support for the plugin API's PATCH method. blueprint: plugin-management-api Change-Id: I94dd1d4faf2ccbc3901ea5d5c7d69e7c5a4f8bdf
This commit is contained in:
parent
5cf912636b
commit
9e1b9923be
|
@ -0,0 +1,4 @@
|
|||
---
|
||||
features:
|
||||
- Plugin updates are now supported in saharaclient. Also,
|
||||
information about plugin labels is available for users.
|
|
@ -49,6 +49,12 @@ class PluginManager(base.ResourceManager):
|
|||
return self._get('/plugins/%s/%s' % (plugin_name, hadoop_version),
|
||||
'plugin')
|
||||
|
||||
def update(self, plugin_name, values):
    """Update a plugin and return the updated result.

    :param plugin_name: name of the plugin to update
    :param values: dict of plugin fields to patch
    :returns: the updated plugin resource
    """
    # PATCH semantics: only the fields present in ``values`` are changed.
    return self._patch("/plugins/%s" % plugin_name, values, 'plugin')
|
||||
|
||||
def convert_to_cluster_template(self, plugin_name, hadoop_version,
|
||||
template_name, filecontent):
|
||||
"""Convert to cluster template
|
||||
|
|
|
@ -13,17 +13,34 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
from os import path
|
||||
import sys
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import exceptions
|
||||
from osc_lib import utils as osc_utils
|
||||
from oslo_log import log as logging
|
||||
from oslo_serialization import jsonutils
|
||||
import six
|
||||
|
||||
from saharaclient.osc.v1 import utils
|
||||
|
||||
|
||||
def _serialize_label_items(plugin):
    """Flatten a plugin's label data into sorted (name, status) pairs.

    Plugin-wide labels become ``plugin: <label>`` entries and per-version
    labels become ``plugin version <version>: <label>`` entries.
    """
    items = {}
    for name, info in six.iteritems(plugin.get('plugin_labels', {})):
        items['plugin: %s' % name] = info['status']
    for version, version_labels in six.iteritems(
            plugin.get('version_labels', {})):
        for name, info in six.iteritems(version_labels):
            key = 'plugin version %s: %s' % (version, name)
            items[key] = info['status']
    items = utils.prepare_data(items, list(items.keys()))
    return sorted(items.items())
|
||||
|
||||
|
||||
class ListPlugins(command.Lister):
|
||||
"""Lists plugins"""
|
||||
|
||||
|
@ -98,20 +115,22 @@ class ShowPlugin(command.ShowOne):
|
|||
processes[k] = osc_utils.format_list(v)
|
||||
data['required_image_tags'] = osc_utils.format_list(
|
||||
data['required_image_tags'])
|
||||
|
||||
label_items = _serialize_label_items(data)
|
||||
data = utils.prepare_data(
|
||||
data, ['required_image_tags', 'name', 'description', 'title'])
|
||||
|
||||
data = zip(*sorted(data.items()) + [('', ''), (
|
||||
'Service:', 'Available processes:'), ('', '')] + sorted(
|
||||
processes.items()))
|
||||
data = self.dict2columns(data)
|
||||
data = utils.extend_columns(data, label_items)
|
||||
data = utils.extend_columns(
|
||||
data, [('Service:', 'Available processes:')])
|
||||
data = utils.extend_columns(
|
||||
data, sorted(processes.items()))
|
||||
else:
|
||||
data = client.plugins.get(parsed_args.plugin).to_dict()
|
||||
data['versions'] = osc_utils.format_list(data['versions'])
|
||||
items = _serialize_label_items(data)
|
||||
data = utils.prepare_data(
|
||||
data, ['versions', 'name', 'description', 'title'])
|
||||
data = self.dict2columns(data)
|
||||
|
||||
data = utils.extend_columns(self.dict2columns(data), items)
|
||||
return data
|
||||
|
||||
|
||||
|
@ -159,3 +178,41 @@ class GetPluginConfigs(command.Command):
|
|||
'"%(plugin)s" plugin configs was saved in "%(file)s"'
|
||||
'file' % {'plugin': parsed_args.plugin,
|
||||
'file': parsed_args.file})
|
||||
|
||||
|
||||
class UpdatePlugin(command.ShowOne):
    """Updates plugin"""

    log = logging.getLogger(__name__ + ".UpdatePlugin")

    def get_parser(self, prog_name):
        parser = super(UpdatePlugin, self).get_parser(prog_name)
        parser.add_argument(
            "plugin",
            metavar="<plugin>",
            # Fixed copy-pasted help text from GetPluginConfigs.
            help="Name of the plugin to update",
        )
        parser.add_argument(
            'json',
            metavar="<json>",
            help='JSON representation of the plugin update dictionary',
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)" % parsed_args)
        client = self.app.client_manager.data_processing

        # Read the update dict from the user-supplied JSON file; a parse
        # failure is reported as a CommandError rather than a traceback.
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            update_dict = json.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'update dict from file %s: %s' % (parsed_args.json, e))

        plugin = client.plugins.update(parsed_args.plugin, update_dict)
        data = plugin.to_dict()
        data['versions'] = osc_utils.format_list(data['versions'])

        # Append the flattened label rows after the basic plugin fields.
        items = _serialize_label_items(data)
        data = utils.prepare_data(
            data, ['versions', 'name', 'description', 'title'])
        data = utils.extend_columns(self.dict2columns(data), items)

        return data
|
||||
|
|
|
@ -49,6 +49,14 @@ def prepare_data(data, fields):
|
|||
return new_data
|
||||
|
||||
|
||||
def unzip(data):
|
||||
return zip(*data)
|
||||
|
||||
|
||||
def extend_columns(columns, items):
    """Append ``items`` rows to column data, separated by one empty row."""
    rows = list(unzip(columns))
    rows.append(('', ''))
    rows.extend(items)
    return unzip(rows)
|
||||
|
||||
|
||||
def prepare_column_headers(columns, remap=None):
|
||||
remap = remap if remap else {}
|
||||
new_columns = []
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import mock
|
||||
|
||||
from saharaclient.api import plugins as api_plugins
|
||||
|
@ -28,7 +29,8 @@ PLUGIN_INFO = {'name': 'fake',
|
|||
'node_processes': {
|
||||
'HDFS': ['datanode', 'namenode'],
|
||||
'MapReduce': ['jobtracker', 'tasktracker']
|
||||
}}
|
||||
}, 'plugin_labels': {'enabled': {'status': True}},
|
||||
'version_labels': {'0.1': {'enabled': {'status': True}}}}
|
||||
|
||||
|
||||
class TestPlugins(fakes.TestDataProcessing):
|
||||
|
@ -104,11 +106,13 @@ class TestShowPlugin(TestPlugins):
|
|||
self.plugins_mock.get.assert_called_once_with('fake')
|
||||
|
||||
# Check that columns are correct
|
||||
expected_columns = ('Description', 'Name', 'Title', 'Versions')
|
||||
expected_columns = ('Description', 'Name', 'Title', 'Versions', '',
|
||||
'Plugin version 0.1: enabled', 'Plugin: enabled')
|
||||
self.assertEqual(expected_columns, columns)
|
||||
|
||||
# Check that data is correct
|
||||
expected_data = ('Plugin for tests', 'fake', 'Fake Plugin', '0.1, 0.2')
|
||||
expected_data = ('Plugin for tests', 'fake', 'Fake Plugin',
|
||||
'0.1, 0.2', '', True, True)
|
||||
self.assertEqual(expected_data, data)
|
||||
|
||||
def test_plugin_version_show(self):
|
||||
|
@ -125,12 +129,15 @@ class TestShowPlugin(TestPlugins):
|
|||
|
||||
# Check that columns are correct
|
||||
expected_columns = ('Description', 'Name', 'Required image tags',
|
||||
'Title', '', 'Service:', '', 'HDFS', 'MapReduce')
|
||||
'Title', '', 'Plugin version 0.1: enabled',
|
||||
'Plugin: enabled', '', 'Service:', '', 'HDFS',
|
||||
'MapReduce')
|
||||
self.assertEqual(expected_columns, columns)
|
||||
|
||||
# Check that data is correct
|
||||
expected_data = ('Plugin for tests', 'fake', '0.1, fake',
|
||||
'Fake Plugin', '', 'Available processes:', '',
|
||||
'Fake Plugin', '', True, True, '',
|
||||
'Available processes:', '',
|
||||
'datanode, namenode', 'jobtracker, tasktracker')
|
||||
self.assertEqual(expected_data, data)
|
||||
|
||||
|
@ -188,3 +195,38 @@ class TestGetPluginConfigs(TestPlugins):
|
|||
self.assertEqual(PLUGIN_INFO, args_to_dump[0])
|
||||
# Check that data will be saved to the right file
|
||||
self.assertEqual('testfile', m_open.call_args[0][0])
|
||||
|
||||
|
||||
class TestUpdatePlugin(TestPlugins):
    def setUp(self):
        super(TestUpdatePlugin, self).setUp()
        self.plugins_mock.update.return_value = api_plugins.Plugin(
            None, PLUGIN_INFO)

        # Instantiate the command under test.
        self.cmd = osc_plugins.UpdatePlugin(self.app, None)

    @mock.patch('osc_lib.utils.read_blob_file_contents')
    def test_plugin_update(self, read):
        update_body = {'plugin_labels': {'enabled': {'status': True}}}
        read.return_value = json.dumps(update_body)

        arglist = ['fake', 'update.json']
        verifylist = [('plugin', 'fake'), ('json', 'update.json')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # The API client must receive the parsed update dictionary.
        self.plugins_mock.update.assert_called_once_with(
            'fake', {'plugin_labels': {'enabled': {'status': True}}})

        # Check that columns are correct
        expected_columns = ('Description', 'Name', 'Title', 'Versions', '',
                            'Plugin version 0.1: enabled', 'Plugin: enabled')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('Plugin for tests', 'fake', 'Fake Plugin',
                         '0.1, 0.2', '', True, True)
        self.assertEqual(expected_data, data)
|
||||
|
|
|
@ -37,6 +37,7 @@ openstack.data_processing.v1 =
|
|||
dataprocessing_plugin_list = saharaclient.osc.v1.plugins:ListPlugins
|
||||
dataprocessing_plugin_show = saharaclient.osc.v1.plugins:ShowPlugin
|
||||
dataprocessing_plugin_configs_get = saharaclient.osc.v1.plugins:GetPluginConfigs
|
||||
dataprocessing_plugin_update = saharaclient.osc.v1.plugins:UpdatePlugin
|
||||
|
||||
dataprocessing_data_source_create = saharaclient.osc.v1.data_sources:CreateDataSource
|
||||
dataprocessing_data_source_list = saharaclient.osc.v1.data_sources:ListDataSources
|
||||
|
|
Loading…
Reference in New Issue