Sync with dashboard
All recent changes made to savanna-dashboard have been applied to the client. This patch should be the last sync between the repositories; Savanna Dashboard will be updated to use python-savannaclient.

Change-Id: Ib9807c05ba5af650e1f668e8c642be58025b85ef
savannaclient/api/base.py
@@ -56,7 +56,6 @@ def _check_items(obj, searches):
     return False


-#TODO(nkonovalov) handle response body in case of error
 class ResourceManager(object):
     resource_class = None

@@ -78,9 +77,9 @@ class ResourceManager(object):
             self._raise_api_exception(resp)

         if response_key is not None:
-            data = resp.json()[response_key]
+            data = get_json(resp)[response_key]
         else:
-            data = resp.json()
+            data = get_json(resp)
         return self.resource_class(self, data)

     def _update(self, url, data):
@@ -91,7 +90,7 @@ class ResourceManager(object):
     def _list(self, url, response_key):
         resp = self.api.client.get(url)
         if resp.status_code == 200:
-            data = resp.json()[response_key]
+            data = get_json(resp)[response_key]

             return [self.resource_class(self, res)
                     for res in data]
@@ -103,9 +102,9 @@ class ResourceManager(object):

         if resp.status_code == 200:
             if response_key is not None:
-                data = resp.json()[response_key]
+                data = get_json(resp)[response_key]
             else:
-                data = resp.json()
+                data = get_json(resp)
             return self.resource_class(self, data)
         else:
             self._raise_api_exception(resp)
@@ -120,9 +119,21 @@ class ResourceManager(object):
         return self.resource_class.resource_name + 's'

     def _raise_api_exception(self, resp):
-        error_data = resp.json()
+        error_data = get_json(resp)
         raise APIException(error_data["error_message"])


+def get_json(response):
+    """This method provided backward compatibility with old versions
+    of requests library
+
+    """
+    json_field_or_function = getattr(response, 'json', None)
+    if callable(json_field_or_function):
+        return response.json()
+    else:
+        return json.loads(response.content)
+
+
 class APIException(Exception):
     pass
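The get_json() helper added above exists because newer requests releases expose the parsed body through a callable Response.json(), while older ones do not, so the client falls back to json.loads(response.content). For illustration only, a minimal sketch of the fallback path against a made-up stand-in object, not a real requests response:

    import json

    class OldStyleResponse(object):
        # hypothetical response object with no callable .json attribute
        content = '{"clusters": []}'

    def get_json(response):
        json_field_or_function = getattr(response, 'json', None)
        if callable(json_field_or_function):
            return response.json()
        return json.loads(response.content)

    print(get_json(OldStyleResponse()))  # prints {'clusters': []}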
savannaclient/api/client.py
@@ -29,7 +29,6 @@ from savannaclient.api import images
 from savannaclient.api import job_binaries
 from savannaclient.api import job_binary_internals
 from savannaclient.api import job_executions
-from savannaclient.api import job_origins
 from savannaclient.api import jobs
 from savannaclient.api import node_group_templates
 from savannaclient.api import plugins
@@ -38,10 +37,10 @@ LOG = logging.getLogger(__name__)


 class Client(object):
-    def __init__(self, username, api_key, project_id=None, project_name=None,
-                 auth_url=None, savanna_url=None, timeout=None,
-                 endpoint_type='publicURL', service_type='mapreduce',
-                 input_auth_token=None):
+    def __init__(self, username=None, api_key=None, project_id=None,
+                 project_name=None, auth_url=None, savanna_url=None,
+                 timeout=None, endpoint_type='publicURL',
+                 service_type='mapreduce', input_auth_token=None):
         if savanna_url and not isinstance(savanna_url, six.string_types):
             raise RuntimeError('Savanna url should be string')
         if (isinstance(project_name, six.string_types) or
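With every constructor argument now an optional keyword, callers can pass only the pieces they have. An illustrative sketch; the module path, endpoint and credentials below are assumptions, not part of this patch:

    from savannaclient.api.client import Client  # path assumed from the imports above

    # Reuse an existing Keystone token and a known Savanna endpoint...
    savanna = Client(savanna_url='http://controller:8386/v1.1/<project-id>',
                     input_auth_token='<keystone-token>')

    # ...or supply credentials and let the client authenticate.
    savanna = Client(username='demo', api_key='secret', project_name='demo',
                     auth_url='http://controller:5000/v2.0/')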
@@ -93,7 +92,6 @@ class Client(object):

         self.data_sources = data_sources.DataSourceManager(self)
         self.jobs = jobs.JobsManager(self)
-        self.job_origins = job_origins.JobOriginsManager(self)
         self.job_executions = job_executions.JobExecutionsManager(self)
         self.job_binaries = job_binaries.JobBinariesManager(self)
         self.job_binary_internals =\
savannaclient/api/cluster_templates.py
@@ -26,18 +26,20 @@ class ClusterTemplateManager(base.ResourceManager):
     resource_class = ClusterTemplate

     def create(self, name, plugin_name, hadoop_version, description,
-               cluster_configs, node_groups, anti_affinity):
-        # expecting node groups to be array of dictionaries
+               cluster_configs, node_groups, anti_affinity, net_id=None):
         data = {
             'name': name,
             'plugin_name': plugin_name,
             'hadoop_version': hadoop_version,
             'description': description,
             'cluster_configs': cluster_configs,
-            'node_groups': node_groups,
+            'node_groups': [ng.as_dict() for ng in node_groups],
             'anti_affinity': anti_affinity
         }

+        if net_id:
+            data.update({'neutron_management_network': net_id})
+
         return self._create('/cluster-templates', data, 'cluster_template')

     def list(self):
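A hedged example of the extended call; `client` and all values are illustrative, and node_groups is assumed to be a list of objects exposing as_dict(), as the comprehension above requires:

    template = client.cluster_templates.create(
        name='vanilla-template',
        plugin_name='vanilla',
        hadoop_version='1.2.1',
        description='demo template',
        cluster_configs={},
        node_groups=node_groups,        # objects providing as_dict()
        anti_affinity=[],
        net_id='<neutron-network-id>')  # optional Neutron management network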
savannaclient/api/clusters.py
@@ -31,12 +31,19 @@ class ClusterManager(base.ResourceManager):
             raise base.APIException('Cluster is missing field "%s"' %
                                     var_name)

+    def _copy_if_defined(self, data, **kwargs):
+        for var_name, var_value in kwargs.iteritems():
+            if var_value is not None:
+                data[var_name] = var_value
+
     def create(self, name, plugin_name, hadoop_version,
                cluster_template_id=None, default_image_id=None,
                description=None, cluster_configs=None, node_groups=None,
-               user_keypair_id=None, anti_affinity=None):
+               user_keypair_id=None, anti_affinity=None, net_id=None):
+
+        if node_groups is not None:
+            node_groups = [ng.as_dict() for ng in node_groups]
+
         # expecting node groups to be array of dictionaries
         data = {
             'name': name,
             'plugin_name': plugin_name,
@@ -55,7 +62,8 @@ class ClusterManager(base.ResourceManager):
                               cluster_configs=cluster_configs,
                               node_groups=node_groups,
                               user_keypair_id=user_keypair_id,
-                              anti_affinity=anti_affinity)
+                              anti_affinity=anti_affinity,
+                              neutron_management_network=net_id)

         return self._create('/clusters', data, 'cluster')
savannaclient/api/data_sources.py
@@ -23,23 +23,23 @@ class DataSources(base.Resource):
 class DataSourceManager(base.ResourceManager):
     resource_class = DataSources

-    def list(self):
-        return self._list('/data-sources', "data_sources")
-
-    def delete(self, data_source_id):
-        return self._delete('/data-sources/%s' % data_source_id)
-
-    def get(self, data_source_id):
-        return self._get('/data-sources/%s' % data_source_id,
-                         'resource')
-
-    def create(self, name, description, data_source_type, url, credentials):
+    def create(self, name, description, data_source_type,
+               url, credential_user, credential_pass):
         data = {
             'name': name,
             'description': description,
             'type': data_source_type,
             'url': url,
-            'credentials': credentials
+            'credentials': {'user': credential_user,
+                            'password': credential_pass}
         }

         return self._create('/data-sources', data, 'data_source')
+
+    def list(self):
+        return self._list('/data-sources', 'data_sources')
+
+    def get(self, data_source_id):
+        return self._get('/data-sources/%s' % data_source_id, 'data_source')
+
+    def delete(self, data_source_id):
+        self._delete('/data-sources/%s' % data_source_id)
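The manager now builds the credentials dict itself from two plain strings. An illustrative call with made-up values:

    source = client.data_sources.create(
        name='demo-input',
        description='input data set',
        data_source_type='swift',
        url='swift://demo-container.savanna/input',
        credential_user='demo',
        credential_pass='swift-password')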
savannaclient/api/job_binaries.py
@@ -23,18 +23,21 @@ class JobBinaries(base.Resource):
 class JobBinariesManager(base.ResourceManager):
     resource_class = JobBinaries

-    def list(self):
-        return self._list('/job-binaries', "binaries")
-
-    def delete(self, job_binary_id):
-        return self._delete('/job-binaries/%s' % job_binary_id)
-
-    def create(self, name, url, description=None):
+    def create(self, name, url, description, extra):
         data = {
-            'name': name,
-            'url': url,
+            "name": name,
+            "url": url,
+            "description": description,
+            "extra": extra
         }

-        self._copy_if_defined(data, description=description)
-        return self._create('/job-binaries', data, 'job_binary')
-
+        return self._create("/job-binaries", data, "job_binary")
+
+    def list(self):
+        return self._list('/job-binaries', 'binaries')
+
+    def get(self, job_binary_id):
+        return self._get('/job-binaries/%s' % job_binary_id, 'job_binary')
+
+    def delete(self, job_binary_id):
+        self._delete('/job-binaries/%s' % job_binary_id)
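description and extra are now required positional arguments; extra is an additional dict passed through to the API (for example, credentials for a binary stored in Swift). A hedged example with made-up values:

    binary = client.job_binaries.create(
        name='wordcount-job',
        url='swift://demo-container.savanna/wordcount.jar',
        description='example MapReduce job binary',
        extra={'user': 'demo', 'password': 'swift-password'})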
savannaclient/api/job_binary_internals.py
@@ -23,18 +23,16 @@ class JobBinaryInternal(base.Resource):
 class JobBinaryInternalsManager(base.ResourceManager):
     resource_class = JobBinaryInternal

-    def create(self, name, data):
-        return self._create('/job-binary-internals/%s' % name, data,
-                            'job_binary_internal')
-
     def list(self):
-        return self._list('/job-binary-internals', "binaries")
+        return self._list('/job-binary-internals', 'binaries')

     def get(self, job_binary_id):
         return self._get('/job-binary-internals/%s' % job_binary_id,
                          'job_binary_internal')

     def delete(self, job_binary_id):
-        return self._delete('/job-binary-internals/%s' % job_binary_id)
+        self._delete('/job-binary-internals/%s' % job_binary_id)
+
+    def create(self, name, data):
+        url = '/job-binary-internals/%s' % name
+        resp = self.api.client.put(url, data)
+
+        if resp.status_code != 202:
+            self._raise_api_exception(resp)
+
+        data = resp.json()["job_binary_internal"]
+        return self.resource_class(self, data)
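The rewritten create() PUTs the raw payload to /job-binary-internals/<name> and expects a 202. A short illustrative call (file name made up):

    with open('cleanup.pig') as script:
        internal = client.job_binary_internals.create('cleanup.pig', script.read())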
savannaclient/api/job_executions.py
@@ -16,30 +16,29 @@
 from savannaclient.api import base


-class JobExecutions(base.Resource):
+class JobExecution(base.Resource):
     resource_name = 'JobExecution'


 class JobExecutionsManager(base.ResourceManager):
-    resource_class = JobExecutions
+    resource_class = JobExecution

     def list(self):
-        return self._list('/job-executions', "job_executions")
+        return self._list('/job-executions', 'job_executions')

-    def get(self, job_execution_id):
-        return self._get('/job-executions/%s' % job_execution_id)
+    def get(self, obj_id):
+        return self._get('/job-executions/%s' % obj_id, 'job_execution')

-    def cancel(self, job_execution_id):
-        return self._get('/job-executions/%s/cancel' % job_execution_id)
+    def delete(self, obj_id):
+        self._delete('/job-executions/%s' % obj_id)

-    def delete(self, job_execution_id):
-        return self._delete('/job-executions/%s' % job_execution_id)
-
-    def execute(self, job_id, input_id, output_id, cluster_id):
+    def create(self, job_id, cluster_id, input_id, output_id, configs):
+        url = "/jobs/%s/execute" % job_id
         data = {
-            'input_id': input_id,
-            'output_id': output_id,
-            'cluster_id': cluster_id,
+            "input_id": input_id,
+            "output_id": output_id,
+            "cluster_id": cluster_id,
+            "job_configs": configs
         }

-        return self._create('/jobs/%s/execute' % job_id, data, "job_execution")
+        return self._create(url, data, 'job_execution')
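execute() has become create() and now also sends a job_configs payload. An illustrative call, assuming ids obtained from the other managers and a config layout accepted by the Savanna EDP API:

    execution = client.job_executions.create(
        job_id=job.id,
        cluster_id=cluster.id,
        input_id=input_source.id,
        output_id=output_source.id,
        configs={'configs': {'mapred.reduce.tasks': '1'}})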
savannaclient/api/job_origins.py (file removed)
@@ -1,43 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from savannaclient.api import base
-
-
-class JobOrigins(base.Resource):
-    resource_name = 'JobOrigin'
-
-
-class JobOriginsManager(base.ResourceManager):
-    resource_class = JobOrigins
-
-    def list(self):
-        return self._list('/job-origins', "job_origins")
-
-    def delete(self, job_origin_id):
-        return self._delete('/job-origins/%s' % job_origin_id)
-
-    def get(self, job_origin_id):
-        return self._get('/job-origins/%s' % job_origin_id)
-
-    def create(self, name, description, mains, libs=[]):
-        data = {
-            'name': name,
-            'description': description,
-            'mains': mains,
-            'libs': libs,
-        }
-
-        return self._create('/job-origins', data, "job_origin")
savannaclient/api/jobs.py
@@ -16,31 +16,29 @@
 from savannaclient.api import base


-class Jobs(base.Resource):
+class Job(base.Resource):
     resource_name = 'Job'


 class JobsManager(base.ResourceManager):
-    resource_class = Jobs
+    resource_class = Job

-    def list(self):
-        return self._list('/jobs', "jobs")
-
-    def delete(self, job_id):
-        return self._delete('/jobs/%s' % job_id)
-
-    def get(self, job_id):
-        return self._get('/jobs/%s' % job_id)
-
-    def create(self, name, description, job_origin_id,
-               job_type, input_type, output_type):
+    def create(self, name, type, mains, libs, description):
         data = {
             'name': name,
+            'type': type,
             'description': description,
-            'type': job_type,
-            'job_origin_id': job_origin_id,
-            'input_type': input_type,
-            'output_type': output_type
+            'mains': mains,
+            'libs': libs
         }

         return self._create('/jobs', data, 'job')
+
+    def list(self):
+        return self._list('/jobs', 'jobs')
+
+    def get(self, job_id):
+        return self._get('/jobs/%s' % job_id, 'job')
+
+    def delete(self, job_id):
+        self._delete('/jobs/%s' % job_id)
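Jobs no longer reference a job origin; mains and libs (job binary ids) are passed directly. A hedged example; the ids and the 'Pig' type string are illustrative:

    job = client.jobs.create(
        name='demo-pig-job',
        type='Pig',
        mains=[pig_script_binary.id],
        libs=[udf_binary.id],
        description='example Pig job')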
savannaclient/api/node_group_templates.py
@@ -27,7 +27,7 @@ class NodeGroupTemplateManager(base.ResourceManager):

     def create(self, name, plugin_name, hadoop_version, flavor_id,
                description=None, volumes_per_node=None, volumes_size=None,
-               node_processes=None, node_configs=None):
+               node_processes=None, node_configs=None, floating_ip_pool=None):

         data = {
             'name': name,
@@ -35,16 +35,21 @@ class NodeGroupTemplateManager(base.ResourceManager):
             'hadoop_version': hadoop_version,
             'description': description,
             'flavor_id': flavor_id,
-            'node_processes': node_processes,
-            'node_configs': node_configs
+            'node_processes': node_processes
         }

+        if not node_configs:
+            data["node_configs"] = dict()
+
+        if floating_ip_pool:
+            data.update({"floating_ip_pool": floating_ip_pool})
+
         if volumes_per_node:
             data.update({"volumes_per_node": volumes_per_node,
                          "volumes_size": volumes_size})

-        return self._create(
-            '/node-group-templates', data, 'node_group_template')
+        return self._create('/node-group-templates', data,
+                            'node_group_template')

     def list(self):
         return self._list('/node-group-templates', 'node_group_templates')
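floating_ip_pool is only sent when provided, and node_configs falls back to an empty dict when not given. An illustrative call with made-up flavor and pool values:

    ngt = client.node_group_templates.create(
        name='worker-ngt',
        plugin_name='vanilla',
        hadoop_version='1.2.1',
        flavor_id='2',
        description='worker nodes with attached volumes',
        volumes_per_node=2,
        volumes_size=10,
        node_processes=['datanode', 'tasktracker'],
        floating_ip_pool='public')  # external network used for floating IPs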
savannaclient/api/plugins.py
@@ -15,6 +15,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import urllib
+
 from savannaclient.api import base


@@ -36,10 +38,15 @@ class PluginManager(base.ResourceManager):
                         'plugin')

     def convert_to_cluster_template(self, plugin_name, hadoop_version,
-                                    filecontent):
-        resp = self.api.client.post('/plugins/%s/%s/convert-config' %
-                                    (plugin_name, hadoop_version), filecontent)
+                                    template_name, filecontent):
+        resp = self.api.client.post('/plugins/%s/%s/convert-config/%s' %
+                                    (plugin_name,
+                                     hadoop_version,
+                                     urllib.quote(template_name)),
+                                    filecontent)
         if resp.status_code != 202:
             raise RuntimeError('Failed to upload template file for plugin "%s"'
                                ' and version "%s"' %
                                (plugin_name, hadoop_version))
         else:
             return base.get_json(resp)['cluster_template']
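The converted template is now created under an explicit, URL-quoted name. A hedged example; plugin name, version and file are illustrative:

    with open('cluster-spec.json') as spec:
        template = client.plugins.convert_to_cluster_template(
            'hdp', '1.3.2', 'converted hdp template', spec.read())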
setup.cfg | 12
@@ -1,5 +1,6 @@
 [metadata]
 name = python-savannaclient
+version = 0.3
 summary = Client library for Savanna API
 description-file =
     README.rst
@@ -18,11 +19,10 @@ classifier =
     Programming Language :: Python :: 2.7
     Programming Language :: Python :: 2.6

-[files]
-packages =
-    savannaclient
-
 [global]
-setup-hooks =
-    pbr.hooks.setup_hook
+setup-hooks = pbr.hooks.setup_hook


+[files]
+packages = savannaclient