Merge "rename service api modules"
This commit is contained in:
commit
587ecb66df
@@ -97,3 +97,19 @@ https://wiki.openstack.org/wiki/Sahara/api-v2

 This page will help to coordinate the various reviews, specs, and work
 items that are a continuing facet of this work.
+
+The API service layer
+---------------------
+
+When contributing to the version 2 API, it will be necessary to add code
+that modifies the data and behavior of HTTP calls as they are sent to
+and from the processing engine and data abstraction layers. Most
+frequently in the sahara codebase, these interactions are handled in the
+modules of the ``sahara.service.api`` package. This package contains
+code for all versions of the API and follows a namespace mapping that is
+similar to the routing functions of ``sahara.api``.
+
+Although these modules are not the definitive answer to all API-related
+code questions, they are a solid starting point when examining the
+extent of new work. Furthermore, they serve as a central point to begin
+API debugging efforts when the need arises.
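To make that layering concrete, here is a minimal sketch of the pattern this change introduces, modeled on the v2 ``jobs`` handler shown in the hunks below. The route and policy strings are illustrative only; the imports and the ``job_execute`` body are taken from the diff itself.

```python
# Sketch of a v2 REST handler delegating to sahara.service.api.v2.
# The route and policy names here are illustrative, not taken from the tree.
from sahara.api import acl
from sahara.service.api.v2 import job_executions as j_e_api
from sahara.service.api.v2 import jobs as api
from sahara.service import validation as v
from sahara.service.validations.edp import job_execution as v_j_e
from sahara.service.validations.edp import job_execution_schema as v_j_e_schema
import sahara.utils.api as u


rest = u.Rest('v2', __name__)  # same router helper the v10/v11 modules use


@rest.post('/jobs/<job_id>/execute')          # illustrative route
@acl.enforce("data-processing:jobs:execute")  # illustrative policy name
@v.check_exists(api.get_job, id='job_id')
@v.validate(v_j_e_schema.JOB_EXEC_SCHEMA, v_j_e.check_job_execution)
def job_execute(job_id, data):
    # The HTTP layer only validates and renders; the actual work happens in
    # the service api modules, which hand long-running operations to api.OPS.
    return u.render(job_execution=j_e_api.execute_job(job_id, data).to_dict())
```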
@@ -17,7 +17,7 @@ from oslo_log import log as logging
 import six

 from sahara.api import acl
-from sahara.service import api
+from sahara.service.api import v10 as api
 from sahara.service import validation as v
 from sahara.service.validations import cluster_template_schema as ct_schema
 from sahara.service.validations import cluster_templates as v_ct

@@ -16,7 +16,7 @@
 from oslo_log import log as logging

 from sahara.api import acl
-from sahara.service.edp import api
+from sahara.service.api import v11 as api
 from sahara.service import validation as v
 from sahara.service.validations.edp import data_source as v_d_s
 from sahara.service.validations.edp import data_source_schema as v_d_s_schema

@@ -14,7 +14,7 @@
 # limitations under the License.

 from sahara.api import acl
-from sahara.service import api
+from sahara.service.api.v2 import cluster_templates as api
 from sahara.service import validation as v
 from sahara.service.validations import cluster_template_schema as ct_schema
 from sahara.service.validations import cluster_templates as v_ct

@@ -16,7 +16,7 @@
 import six

 from sahara.api import acl
-from sahara.service import api
+from sahara.service.api.v2 import clusters as api
 from sahara.service import validation as v
 from sahara.service.validations import clusters as v_c
 from sahara.service.validations import clusters_scaling as v_c_s

@@ -14,7 +14,7 @@
 # limitations under the License.

 from sahara.api import acl
-from sahara.service.edp import api
+from sahara.service.api.v2 import data_sources as api
 from sahara.service import validation as v
 from sahara.service.validations.edp import data_source as v_d_s
 from sahara.service.validations.edp import data_source_schema as v_d_s_schema

@@ -14,7 +14,7 @@
 # limitations under the License.

 from sahara.api import acl
-from sahara.service import api
+from sahara.service.api.v2 import images as api
 from sahara.service import validation as v
 from sahara.service.validations import images as v_images
 import sahara.utils.api as u

@@ -14,7 +14,7 @@
 # limitations under the License.

 from sahara.api import acl
-from sahara.service.edp import api
+from sahara.service.api.v2 import job_binaries as api
 from sahara.service import validation as v
 from sahara.service.validations.edp import job_binary as v_j_b
 from sahara.service.validations.edp import job_binary_internal as v_j_b_i

@@ -15,7 +15,7 @@


 from sahara.api import acl
-from sahara.service.edp import api
+from sahara.service.api.v2 import job_executions as api
 from sahara.service import validation as v
 from sahara.service.validations.edp import job_execution as v_j_e
 from sahara.service.validations.edp import job_execution_schema as v_j_e_schema

@@ -14,7 +14,7 @@
 # limitations under the License.

 from sahara.api import acl
-from sahara.service.edp import api
+from sahara.service.api.v2 import job_types as api
 import sahara.utils.api as u


@@ -14,7 +14,8 @@
 # limitations under the License.

 from sahara.api import acl
-from sahara.service.edp import api
+from sahara.service.api.v2 import job_executions as j_e_api
+from sahara.service.api.v2 import jobs as api
 from sahara.service import validation as v
 from sahara.service.validations.edp import job as v_j
 from sahara.service.validations.edp import job_execution as v_j_e
@@ -68,7 +69,7 @@ def job_delete(job_id):
 @v.check_exists(api.get_job, id='job_id')
 @v.validate(v_j_e_schema.JOB_EXEC_SCHEMA, v_j_e.check_job_execution)
 def job_execute(job_id, data):
-    return u.render(job_execution=api.execute_job(job_id, data).to_dict())
+    return u.render(job_execution=j_e_api.execute_job(job_id, data).to_dict())


 @rest.get('/jobs/config-hints/<job_type>')

@@ -15,7 +15,7 @@


 from sahara.api import acl
-from sahara.service import api
+from sahara.service.api.v2 import node_group_templates as api
 from sahara.service import validation as v
 from sahara.service.validations import node_group_template_schema as ngt_schema
 from sahara.service.validations import node_group_templates as v_ngt

@@ -14,7 +14,7 @@
 # limitations under the License.

 from sahara.api import acl
-from sahara.service import api
+from sahara.service.api.v2 import plugins as api
 from sahara.service import validation as v
 from sahara.service.validations import plugins as v_p
 import sahara.utils.api as u
@@ -28,9 +28,8 @@ from sahara import config
 from sahara.i18n import _LI
 from sahara.i18n import _LW
 from sahara.plugins import base as plugins_base
-from sahara.service import api as service_api
+from sahara.service import api
 from sahara.service.castellan import config as castellan
-from sahara.service.edp import api as edp_api
 from sahara.service import ops as service_ops
 from sahara.service import periodic
 from sahara.utils.openstack import cinder
@@ -99,8 +98,7 @@ def setup_common(possible_topdir, service_name):
 def setup_sahara_api(mode):
     ops = _get_ops_driver(mode)

-    service_api.setup_service_api(ops)
-    edp_api.setup_edp_api(ops)
+    api.setup_api(ops)


 def setup_sahara_engine():
sahara/service/api/__init__.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# Copyright (c) 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.


OPS = None


def setup_api(ops):
    global OPS

    OPS = ops
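The new package module above is the single home for the ``OPS`` handle that previously lived in both ``sahara.service.api`` and ``sahara.service.edp.api``. A rough, self-contained sketch of how it is wired and consumed, assembled from the ``sahara/main.py`` and service-module hunks elsewhere in this commit (``_FakeOps`` is a made-up stand-in used only for illustration):

```python
# Sketch: one ops driver is stored once at startup and then shared by every
# version-specific service module (v10, v11, v2).
from sahara.service import api


class _FakeOps(object):
    """Stand-in for the real ops driver returned by _get_ops_driver()."""

    def provision_cluster(self, cluster_id):
        print("provisioning %s" % cluster_id)


api.setup_api(_FakeOps())        # done once at startup (see sahara/main.py)
api.OPS.provision_cluster("42")  # how the service api modules reach the driver
```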
@@ -22,6 +22,7 @@ from six.moves.urllib import parse as urlparse
 from sahara import conductor as c
 from sahara import context
 from sahara.plugins import base as plugin_base
+from sahara.service import api
 from sahara.service.health import verification_base
 from sahara.service import quotas
 from sahara.utils import cluster as c_u
@@ -36,15 +37,6 @@ CONF = cfg.CONF
 LOG = logging.getLogger(__name__)


-OPS = None
-
-
-def setup_service_api(ops):
-    global OPS
-
-    OPS = ops
-
-
 # Cluster ops

 def get_clusters(**kwargs):
@@ -96,7 +88,7 @@ def scale_cluster(id, data):
         if node_group.id not in to_be_enlarged:
             to_be_enlarged[node_group.id] = node_group.count

-    OPS.provision_scaled_cluster(id, to_be_enlarged)
+    api.OPS.provision_scaled_cluster(id, to_be_enlarged)
     return cluster


@@ -143,7 +135,7 @@ def _cluster_create(values, plugin):
             c_u.change_cluster_status(
                 cluster, c_u.CLUSTER_STATUS_ERROR, six.text_type(e))

-    OPS.provision_cluster(cluster.id)
+    api.OPS.provision_cluster(cluster.id)

     return cluster

@@ -166,14 +158,14 @@ def terminate_cluster(id):
     if cluster is None:
         return

-    OPS.terminate_cluster(id)
+    api.OPS.terminate_cluster(id)
     sender.status_notify(cluster.id, cluster.name, cluster.status,
                          "delete")


 def update_cluster(id, values):
     if verification_base.update_verification_required(values):
-        OPS.handle_verification(id, values)
+        api.OPS.handle_verification(id, values)
         return conductor.cluster_get(context.ctx(), id)
     return conductor.cluster_update(context.ctx(), id, values)
@@ -22,6 +22,7 @@ from sahara import context
 from sahara import exceptions as ex
 from sahara.i18n import _LE
 from sahara.plugins import base as plugin_base
+from sahara.service import api
 from sahara.service.edp.binary_retrievers import dispatch
 from sahara.service.edp import job_manager as manager
 from sahara.utils import edp
@@ -33,15 +34,6 @@ LOG = logging.getLogger(__name__)
 CONF = cfg.CONF


-OPS = None
-
-
-def setup_edp_api(ops):
-    global OPS
-
-    OPS = ops
-
-
 def get_job_types(**kwargs):
     # Return a dictionary of all the job types that can be run
     # by this instance of Sahara. For each job type, the value
@@ -142,7 +134,7 @@ def execute_job(job_id, data):
             conductor.job_execution_destroy(context.ctx(), job_execution)
             raise e

-    OPS.run_edp_job(job_execution.id)
+    api.OPS.run_edp_job(job_execution.id)

     return job_execution

@@ -163,7 +155,7 @@ def get_job_execution(id):
 def cancel_job_execution(id):
     context.set_current_job_execution_id(id)
     job_execution = conductor.job_execution_get(context.ctx(), id)
-    OPS.cancel_job_execution(id)
+    api.OPS.cancel_job_execution(id)

     return job_execution

@@ -177,12 +169,12 @@ def _update_status(info):
     if info:
         status = info.get("status", None)
         if status == edp.JOB_ACTION_SUSPEND:
-            OPS.job_execution_suspend(id)
+            api.OPS.job_execution_suspend(id)


 def delete_job_execution(id):
     context.set_current_job_execution_id(id)
-    OPS.delete_job_execution(id)
+    api.OPS.delete_job_execution(id)


 def get_data_sources(**kwargs):
sahara/service/api/v2/__init__.py (new, empty file)

sahara/service/api/v2/cluster_templates.py (new file, 43 lines)
@@ -0,0 +1,43 @@
# (Apache 2.0 license header, identical to sahara/service/api/__init__.py above)

from sahara import conductor as c
from sahara import context


conductor = c.API


# ClusterTemplate ops

def get_cluster_templates(**kwargs):
    return conductor.cluster_template_get_all(context.ctx(),
                                              regex_search=True, **kwargs)


def get_cluster_template(id):
    return conductor.cluster_template_get(context.ctx(), id)


def create_cluster_template(values):
    return conductor.cluster_template_create(context.ctx(), values)


def terminate_cluster_template(id):
    return conductor.cluster_template_destroy(context.ctx(), id)


def update_cluster_template(id, values):
    return conductor.cluster_template_update(context.ctx(), id, values)
sahara/service/api/v2/clusters.py (new file, 175 lines)
@@ -0,0 +1,175 @@
# (Apache 2.0 license header, identical to sahara/service/api/__init__.py above)

from oslo_utils import excutils
import six

from sahara import conductor as c
from sahara import context
from sahara.plugins import base as plugin_base
from sahara.service import api
from sahara.service.health import verification_base
from sahara.service import quotas
from sahara.utils import cluster as c_u
from sahara.utils import general as g
from sahara.utils.notification import sender


conductor = c.API


# Cluster ops


def get_clusters(**kwargs):
    return conductor.cluster_get_all(context.ctx(),
                                     regex_search=True, **kwargs)


def get_cluster(id, show_progress=False):
    return conductor.cluster_get(context.ctx(), id, show_progress)


def scale_cluster(id, data):
    context.set_current_cluster_id(id)
    ctx = context.ctx()

    cluster = conductor.cluster_get(ctx, id)
    plugin = plugin_base.PLUGINS.get_plugin(cluster.plugin_name)
    existing_node_groups = data.get('resize_node_groups', [])
    additional_node_groups = data.get('add_node_groups', [])

    # the next map is the main object we will work with
    # to_be_enlarged : {node_group_id: desired_amount_of_instances}
    to_be_enlarged = {}
    for ng in existing_node_groups:
        ng_id = g.find(cluster.node_groups, name=ng['name'])['id']
        to_be_enlarged.update({ng_id: ng['count']})

    additional = construct_ngs_for_scaling(cluster, additional_node_groups)
    cluster = conductor.cluster_get(ctx, cluster)
    _add_ports_for_auto_sg(ctx, cluster, plugin)

    try:
        cluster = c_u.change_cluster_status(
            cluster, c_u.CLUSTER_STATUS_VALIDATING)
        quotas.check_scaling(cluster, to_be_enlarged, additional)
        plugin.recommend_configs(cluster, scaling=True)
        plugin.validate_scaling(cluster, to_be_enlarged, additional)
    except Exception as e:
        with excutils.save_and_reraise_exception():
            c_u.clean_cluster_from_empty_ng(cluster)
            c_u.change_cluster_status(
                cluster, c_u.CLUSTER_STATUS_ACTIVE, six.text_type(e))

    # If we are here validation is successful.
    # So let's update to_be_enlarged map:
    to_be_enlarged.update(additional)

    for node_group in cluster.node_groups:
        if node_group.id not in to_be_enlarged:
            to_be_enlarged[node_group.id] = node_group.count

    api.OPS.provision_scaled_cluster(id, to_be_enlarged)
    return cluster


def create_cluster(values):
    plugin = plugin_base.PLUGINS.get_plugin(values['plugin_name'])
    return _cluster_create(values, plugin)


def create_multiple_clusters(values):
    num_of_clusters = values['count']
    clusters = []
    plugin = plugin_base.PLUGINS.get_plugin(values['plugin_name'])
    for counter in range(num_of_clusters):
        cluster_dict = values.copy()
        cluster_name = cluster_dict['name']
        cluster_dict['name'] = get_multiple_cluster_name(num_of_clusters,
                                                         cluster_name,
                                                         counter + 1)
        cluster = _cluster_create(cluster_dict, plugin)

        clusters.append(cluster.id)

    clusters_dict = {'clusters': clusters}
    return clusters_dict


def _cluster_create(values, plugin):
    ctx = context.ctx()
    cluster = conductor.cluster_create(ctx, values)
    context.set_current_cluster_id(cluster.id)
    sender.status_notify(cluster.id, cluster.name, "New",
                         "create")
    _add_ports_for_auto_sg(ctx, cluster, plugin)

    # validating cluster
    try:
        plugin.recommend_configs(cluster)
        cluster = c_u.change_cluster_status(
            cluster, c_u.CLUSTER_STATUS_VALIDATING)
        quotas.check_cluster(cluster)
        plugin.validate(cluster)
    except Exception as e:
        with excutils.save_and_reraise_exception():
            c_u.change_cluster_status(
                cluster, c_u.CLUSTER_STATUS_ERROR, six.text_type(e))

    api.OPS.provision_cluster(cluster.id)

    return cluster


def get_multiple_cluster_name(num_of_clusters, name, counter):
    return "%%s-%%0%dd" % len(str(num_of_clusters)) % (name, counter)


def _add_ports_for_auto_sg(ctx, cluster, plugin):
    for ng in cluster.node_groups:
        if ng.auto_security_group:
            ports = {'open_ports': plugin.get_open_ports(ng)}
            conductor.node_group_update(ctx, ng, ports)


def terminate_cluster(id):
    context.set_current_cluster_id(id)
    cluster = c_u.change_cluster_status(id, c_u.CLUSTER_STATUS_DELETING)

    if cluster is None:
        return

    api.OPS.terminate_cluster(id)
    sender.status_notify(cluster.id, cluster.name, cluster.status,
                         "delete")


def update_cluster(id, values):
    if verification_base.update_verification_required(values):
        api.OPS.handle_verification(id, values)
        return conductor.cluster_get(context.ctx(), id)
    return conductor.cluster_update(context.ctx(), id, values)


def construct_ngs_for_scaling(cluster, additional_node_groups):
    ctx = context.ctx()
    additional = {}
    for ng in additional_node_groups:
        count = ng['count']
        ng['count'] = 0
        ng_id = conductor.node_group_add(ctx, cluster, ng)
        additional.update({ng_id: count})
    return additional
sahara/service/api/v2/data_sources.py (new file, 41 lines)
@@ -0,0 +1,41 @@
# (Apache 2.0 license header, identical to sahara/service/api/__init__.py above)

from sahara import conductor as c
from sahara import context


conductor = c.API


def get_data_sources(**kwargs):
    return conductor.data_source_get_all(context.ctx(),
                                         regex_search=True, **kwargs)


def get_data_source(id):
    return conductor.data_source_get(context.ctx(), id)


def delete_data_source(id):
    conductor.data_source_destroy(context.ctx(), id)


def register_data_source(values):
    return conductor.data_source_create(context.ctx(), values)


def data_source_update(id, values):
    return conductor.data_source_update(context.ctx(), id, values)
sahara/service/api/v2/images.py (new file, 66 lines)
@@ -0,0 +1,66 @@
# (Apache 2.0 license header, identical to sahara/service/api/__init__.py above)

from sahara import conductor as c
from sahara.utils.openstack import base as b
from sahara.utils.openstack import nova


conductor = c.API


# Image Registry


def get_images(name, tags):
    return b.execute_with_retries(
        nova.client().images.list_registered, name, tags)


def get_image(**kwargs):
    if len(kwargs) == 1 and 'id' in kwargs:
        return b.execute_with_retries(nova.client().images.get, kwargs['id'])
    else:
        return b.execute_with_retries(nova.client().images.find, **kwargs)


def get_registered_image(id):
    return b.execute_with_retries(
        nova.client().images.get_registered_image, id)


def register_image(image_id, username, description=None):
    client = nova.client()
    b.execute_with_retries(
        client.images.set_description, image_id, username, description)
    return b.execute_with_retries(client.images.get, image_id)


def unregister_image(image_id):
    client = nova.client()
    b.execute_with_retries(client.images.unset_description, image_id)
    return b.execute_with_retries(client.images.get, image_id)


def add_image_tags(image_id, tags):
    client = nova.client()
    b.execute_with_retries(client.images.tag, image_id, tags)
    return b.execute_with_retries(client.images.get, image_id)


def remove_image_tags(image_id, tags):
    client = nova.client()
    b.execute_with_retries(client.images.untag, image_id, tags)
    return b.execute_with_retries(client.images.get, image_id)
sahara/service/api/v2/job_binaries.py (new file, 72 lines)
@@ -0,0 +1,72 @@
# (Apache 2.0 license header, identical to sahara/service/api/__init__.py above)

from sahara import conductor as c
from sahara import context
from sahara.service.edp.binary_retrievers import dispatch


conductor = c.API


def create_job_binary(values):
    return conductor.job_binary_create(context.ctx(), values)


def get_job_binaries(**kwargs):
    return conductor.job_binary_get_all(context.ctx(),
                                        regex_search=True, **kwargs)


def get_job_binary(id):
    return conductor.job_binary_get(context.ctx(), id)


def update_job_binary(id, values):
    return conductor.job_binary_update(context.ctx(), id, values)


def delete_job_binary(id):
    conductor.job_binary_destroy(context.ctx(), id)


def create_job_binary_internal(values):
    return conductor.job_binary_internal_create(context.ctx(), values)


def get_job_binary_internals(**kwargs):
    return conductor.job_binary_internal_get_all(context.ctx(),
                                                 regex_search=True, **kwargs)


def get_job_binary_internal(id):
    return conductor.job_binary_internal_get(context.ctx(), id)


def delete_job_binary_internal(id):
    conductor.job_binary_internal_destroy(context.ctx(), id)


def get_job_binary_internal_data(id):
    return conductor.job_binary_internal_get_raw_data(context.ctx(), id)


def update_job_binary_internal(id, values):
    return conductor.job_binary_internal_update(context.ctx(), id, values)


def get_job_binary_data(id):
    job_binary = conductor.job_binary_get(context.ctx(), id)
    return dispatch.get_raw_binary(job_binary, with_context=True)
sahara/service/api/v2/job_executions.py (new file, 109 lines)
@@ -0,0 +1,109 @@
# (Apache 2.0 license header, identical to sahara/service/api/__init__.py above)

from oslo_log import log as logging

from sahara import conductor as c
from sahara import context
from sahara import exceptions as ex
from sahara.i18n import _LE
from sahara.service import api
from sahara.service.edp import job_manager as manager
from sahara.utils import edp
from sahara.utils import proxy as p


conductor = c.API
LOG = logging.getLogger(__name__)


def execute_job(job_id, data):
    # Elements common to all job types
    cluster_id = data['cluster_id']
    configs = data.get('job_configs', {})
    interface = data.get('interface', {})

    # Not in Java job types but present for all others
    input_id = data.get('input_id', None)
    output_id = data.get('output_id', None)

    # Since we will use a unified class in the database, we pass
    # a superset for all job types
    # example configs['start'] = '2015-05-12T08:55Z' frequency = 5 means
    # the job will starts from 2015-05-12T08:55Z, runs every 5 mins

    job_execution_info = data.get('job_execution_info', {})

    configs['job_execution_info'] = job_execution_info

    job_ex_dict = {'input_id': input_id, 'output_id': output_id,
                   'job_id': job_id, 'cluster_id': cluster_id,
                   'info': {'status': edp.JOB_STATUS_PENDING},
                   'job_configs': configs, 'extra': {},
                   'interface': interface}
    job_execution = conductor.job_execution_create(context.ctx(), job_ex_dict)
    context.set_current_job_execution_id(job_execution.id)

    # check to use proxy user
    if p.job_execution_requires_proxy_user(job_execution):
        try:
            p.create_proxy_user_for_job_execution(job_execution)
        except ex.SaharaException as e:
            LOG.error(_LE("Can't run job execution. "
                          "(Reasons: {reason})").format(reason=e))
            conductor.job_execution_destroy(context.ctx(), job_execution)
            raise e

    api.OPS.run_edp_job(job_execution.id)

    return job_execution


def get_job_execution_status(id):
    return manager.get_job_status(id)


def job_execution_list(**kwargs):
    return conductor.job_execution_get_all(context.ctx(),
                                           regex_search=True, **kwargs)


def get_job_execution(id):
    return conductor.job_execution_get(context.ctx(), id)


def cancel_job_execution(id):
    context.set_current_job_execution_id(id)
    job_execution = conductor.job_execution_get(context.ctx(), id)
    api.OPS.cancel_job_execution(id)

    return job_execution


def update_job_execution(id, values):
    _update_status(values.pop("info", None))
    return conductor.job_execution_update(context.ctx(), id, values)


def _update_status(info):
    if info:
        status = info.get("status", None)
        if status == edp.JOB_ACTION_SUSPEND:
            api.OPS.job_execution_suspend(id)


def delete_job_execution(id):
    context.set_current_job_execution_id(id)
    api.OPS.delete_job_execution(id)
sahara/service/api/v2/job_types.py (new file, 78 lines)
@@ -0,0 +1,78 @@
# (Apache 2.0 license header, identical to sahara/service/api/__init__.py above)

import six

from sahara.plugins import base as plugin_base
from sahara.utils import edp


def get_job_types(**kwargs):
    # Return a dictionary of all the job types that can be run
    # by this instance of Sahara. For each job type, the value
    # will be a list of plugins that support the job type. For
    # each plugin, include a dictionary of the versions that
    # support the job type.

    # All entries in kwargs are expected to have list values
    hints = kwargs.get("hints", ["false"])[0].lower() == "true"

    plugin_names = kwargs.get("plugin", [])
    all_plugins = plugin_base.PLUGINS.get_plugins()
    if plugin_names:
        plugins = filter(lambda x: x.name in plugin_names, all_plugins)
    else:
        plugins = all_plugins

    job_types = kwargs.get("type", edp.JOB_TYPES_ALL)
    versions = kwargs.get("version", [])

    res = []

    for job_type in job_types:
        # All job types supported by all versions of the plugin.
        # This is a dictionary where keys are plugin version
        # strings and values are lists of job types
        job_entry = {"name": job_type,
                     "plugins": []}

        for plugin in plugins:
            types_for_plugin = plugin.get_edp_job_types(versions)

            # dict returns a new object so we are not modifying the plugin
            p = plugin.dict

            # Find only the versions of this plugin that support the job.
            # Additionally, instead of a list we want a dictionary of
            # plugin versions with corresponding config hints
            p["versions"] = {}

            for version, supported_types in six.iteritems(types_for_plugin):
                if job_type in supported_types:
                    if hints:
                        config_hints = plugin.get_edp_config_hints(job_type,
                                                                   version)
                    else:
                        config_hints = {}
                    p["versions"][version] = config_hints

            # If we found at least one version of the plugin that
            # supports the job type, add the plugin to the result
            if p["versions"]:
                job_entry["plugins"].append(p)

        if job_entry["plugins"]:
            res.append(job_entry)
    return res
sahara/service/api/v2/jobs.py (new file, 45 lines)
@@ -0,0 +1,45 @@
# (Apache 2.0 license header, identical to sahara/service/api/__init__.py above)

from sahara import conductor as c
from sahara import context
from sahara.service.edp import job_manager as manager


conductor = c.API


def get_jobs(**kwargs):
    return conductor.job_get_all(context.ctx(), regex_search=True, **kwargs)


def get_job(id):
    return conductor.job_get(context.ctx(), id)


def create_job(values):
    return conductor.job_create(context.ctx(), values)


def update_job(id, values):
    return conductor.job_update(context.ctx(), id, values)


def delete_job(job_id):
    return conductor.job_destroy(context.ctx(), job_id)


def get_job_config_hints(job_type):
    return manager.get_job_config_hints(job_type)
sahara/service/api/v2/node_group_templates.py (new file, 43 lines)
@@ -0,0 +1,43 @@
# (Apache 2.0 license header, identical to sahara/service/api/__init__.py above)

from sahara import conductor as c
from sahara import context


conductor = c.API


# NodeGroupTemplate ops

def get_node_group_templates(**kwargs):
    return conductor.node_group_template_get_all(context.ctx(),
                                                 regex_search=True, **kwargs)


def get_node_group_template(id):
    return conductor.node_group_template_get(context.ctx(), id)


def create_node_group_template(values):
    return conductor.node_group_template_create(context.ctx(), values)


def terminate_node_group_template(id):
    return conductor.node_group_template_destroy(context.ctx(), id)


def update_node_group_template(id, values):
    return conductor.node_group_template_update(context.ctx(), id, values)
sahara/service/api/v2/plugins.py (new file, 65 lines)
@@ -0,0 +1,65 @@
# (Apache 2.0 license header, identical to sahara/service/api/__init__.py above)

from six.moves.urllib import parse as urlparse

from sahara import conductor as c
from sahara import context
from sahara.plugins import base as plugin_base


conductor = c.API


# Plugins ops

def get_plugins():
    return plugin_base.PLUGINS.get_plugins()


def get_plugin(plugin_name, version=None):
    plugin = plugin_base.PLUGINS.get_plugin(plugin_name)
    if plugin:
        res = plugin.as_resource()
        if version:
            if version in plugin.get_versions():
                configs = plugin.get_all_configs(version)
                res._info['configs'] = [c.dict for c in configs]
                processes = plugin.get_node_processes(version)
                res._info['node_processes'] = processes
                required_image_tags = plugin.get_required_image_tags(version)
                res._info['required_image_tags'] = required_image_tags
            else:
                return None
        return res


def convert_to_cluster_template(plugin_name, version, template_name,
                                config_file):
    plugin = plugin_base.PLUGINS.get_plugin(plugin_name)
    return plugin.convert(config_file, plugin_name, version,
                          urlparse.unquote(template_name),
                          conductor.cluster_template_create)


def construct_ngs_for_scaling(cluster, additional_node_groups):
    ctx = context.ctx()
    additional = {}
    for ng in additional_node_groups:
        count = ng['count']
        ng['count'] = 0
        ng_id = conductor.node_group_add(ctx, cluster, ng)
        additional.update({ng_id: count})
    return additional
@@ -26,7 +26,7 @@ import six
 from sahara import conductor as c
 from sahara import context
 from sahara.i18n import _LW
-from sahara.service import api
+from sahara.service.api import v10 as api
 from sahara.service import coordinator
 from sahara.service.edp import job_manager
 from sahara.service.health import verification_base as vb

@@ -24,7 +24,7 @@ from sahara import context
 import sahara.exceptions as ex
 from sahara.i18n import _
 import sahara.plugins.base as plugin_base
-import sahara.service.api as api
+from sahara.service.api import v10 as api
 from sahara.utils import general as g
 import sahara.utils.openstack.cinder as cinder
 import sahara.utils.openstack.nova as nova

@@ -16,7 +16,7 @@

 from sahara import exceptions as ex
 from sahara.i18n import _
-from sahara.service import api
+from sahara.service.api import v10 as api
 import sahara.service.validations.base as b
 from sahara.service.validations import shares


@@ -18,7 +18,7 @@ from oslo_config import cfg
 from sahara import context
 import sahara.exceptions as ex
 from sahara.i18n import _
-import sahara.service.api as api
+from sahara.service.api import v10 as api
 from sahara.service.health import verification_base
 from sahara.service.validations import acl
 import sahara.service.validations.base as b

@@ -18,7 +18,8 @@ from sahara import context
 import sahara.exceptions as ex
 from sahara.i18n import _
 import sahara.plugins.base as plugin_base
-import sahara.service.api as api
+from sahara.service import api as service_api
+from sahara.service.api import v10 as api
 from sahara.service.validations import acl
 import sahara.service.validations.base as b
 from sahara.utils import cluster as c_u
@@ -38,7 +39,7 @@ def check_cluster_scaling(data, cluster_id, **kwargs):
     cluster_engine = cluster.sahara_info.get(
         'infrastructure_engine') if cluster.sahara_info else None

-    engine_type_and_version = api.OPS.get_engine_type_and_version()
+    engine_type_and_version = service_api.OPS.get_engine_type_and_version()
     if (not cluster_engine and
             not engine_type_and_version.startswith('direct')):
         raise ex.InvalidReferenceException(

@@ -15,7 +15,7 @@

 import sahara.exceptions as e
 from sahara.i18n import _
-from sahara.service.edp import api
+from sahara.service.api import v11 as api
 from sahara.service.validations.edp import job_interface as j_i
 from sahara.utils import edp


@@ -16,7 +16,7 @@

 from sahara import exceptions as ex
 from sahara.i18n import _
-from sahara.service import api
+from sahara.service.api import v10 as api
 import sahara.service.validations.base as b
 from sahara.service.validations import shares

sahara/tests/unit/service/api/__init__.py (new, empty file)

@@ -22,7 +22,8 @@ from sahara import conductor as cond
 from sahara import context
 from sahara import exceptions as exc
 from sahara.plugins import base as pl_base
-from sahara.service import api
+from sahara.service import api as service_api
+from sahara.service.api import v10 as api
 from sahara.tests.unit import base
 from sahara.utils import cluster as c_u

@@ -162,7 +163,7 @@ class TestApi(base.SaharaWithDbTestCase):
         super(TestApi, self).setUp()
         self.calls_order = []
         pl_base.PLUGINS = FakePluginManager(self.calls_order)
-        api.setup_service_api(FakeOps(self.calls_order))
+        service_api.setup_api(FakeOps(self.calls_order))
         oslo_messaging.notify.notifier.Notifier.info = mock.Mock()
         self.ctx = context.ctx()

@@ -55,7 +55,7 @@ class TestPeriodicBack(base.SaharaWithDbTestCase):
                                             mock.call(u'3')])

     @mock.patch('sahara.service.trusts.use_os_admin_auth_token')
-    @mock.patch('sahara.service.api.terminate_cluster')
+    @mock.patch('sahara.service.api.v10.terminate_cluster')
     def test_transient_cluster_terminate(self, terminate_cluster,
                                          use_os_admin_auth_token):

@@ -88,7 +88,7 @@ class TestPeriodicBack(base.SaharaWithDbTestCase):
         terminate_cluster.assert_has_calls([mock.call(u'1')])
         self.assertEqual(1, use_os_admin_auth_token.call_count)

-    @mock.patch('sahara.service.api.terminate_cluster')
+    @mock.patch('sahara.service.api.v10.terminate_cluster')
     def test_not_transient_cluster_does_not_terminate(self, terminate_cluster):

         timeutils.set_time_override(datetime.datetime(2005, 2, 1, 0, 0))
@@ -98,7 +98,7 @@ class TestPeriodicBack(base.SaharaWithDbTestCase):

         self.assertEqual(0, terminate_cluster.call_count)

-    @mock.patch('sahara.service.api.terminate_cluster')
+    @mock.patch('sahara.service.api.v10.terminate_cluster')
     def test_transient_cluster_not_killed_too_early(self, terminate_cluster):

         timeutils.set_time_override(datetime.datetime(2005, 2, 1, second=0))
@@ -111,7 +111,7 @@ class TestPeriodicBack(base.SaharaWithDbTestCase):
         self.assertEqual(0, terminate_cluster.call_count)

     @mock.patch('sahara.service.trusts.use_os_admin_auth_token')
-    @mock.patch('sahara.service.api.terminate_cluster')
+    @mock.patch('sahara.service.api.v10.terminate_cluster')
     def test_transient_cluster_killed_in_time(self, terminate_cluster,
                                               use_os_admin_auth_token):

@@ -126,7 +126,7 @@ class TestPeriodicBack(base.SaharaWithDbTestCase):
         terminate_cluster.assert_has_calls([mock.call(u'1')])
         self.assertEqual(1, use_os_admin_auth_token.call_count)

-    @mock.patch('sahara.service.api.terminate_cluster')
+    @mock.patch('sahara.service.api.v10.terminate_cluster')
     def test_incomplete_cluster_not_killed_too_early(self, terminate_cluster):

         self.override_config('cleanup_time_for_incomplete_clusters', 1)
@@ -142,7 +142,7 @@ class TestPeriodicBack(base.SaharaWithDbTestCase):
         self.assertEqual(0, terminate_cluster.call_count)

     @mock.patch('sahara.service.trusts.use_os_admin_auth_token')
-    @mock.patch('sahara.service.api.terminate_cluster')
+    @mock.patch('sahara.service.api.v10.terminate_cluster')
     def test_incomplete_cluster_killed_in_time(self, terminate_cluster,
                                                use_os_admin_auth_token):

@@ -159,7 +159,7 @@ class TestPeriodicBack(base.SaharaWithDbTestCase):
         terminate_cluster.assert_has_calls([mock.call(u'1')])
         self.assertEqual(1, use_os_admin_auth_token.call_count)

-    @mock.patch('sahara.service.api.terminate_cluster')
+    @mock.patch('sahara.service.api.v10.terminate_cluster')
     def test_active_cluster_not_killed_as_inactive(
             self, terminate_cluster):
         self.override_config('cleanup_time_for_incomplete_clusters', 1)
@@ -210,7 +210,7 @@ class TestPeriodicBack(base.SaharaWithDbTestCase):

     @mock.patch(
         'sahara.service.health.verification_base.validate_verification_start')
-    @mock.patch('sahara.service.api.update_cluster')
+    @mock.patch('sahara.service.api.v10.update_cluster')
     def test_run_verifications_executed(self, cluster_update, ver_valid):
         self._make_cluster('1')
         p._make_periodic_tasks().run_verifications(None)
@@ -220,7 +220,7 @@ class TestPeriodicBack(base.SaharaWithDbTestCase):

     @mock.patch(
         'sahara.service.health.verification_base.validate_verification_start')
-    @mock.patch('sahara.service.api.update_cluster')
+    @mock.patch('sahara.service.api.v10.update_cluster')
     def test_run_verifications_not_executed(self, cluster_update, ver_valid):
         self._make_cluster('1', status=c_u.CLUSTER_STATUS_ERROR)
         p._make_periodic_tasks().run_verifications(None)
@@ -19,7 +19,7 @@ import mock
 import testtools

 import sahara.exceptions as ex
-from sahara.service import api
+from sahara.service.api import v10 as api
 from sahara.service.validations.edp import data_source as ds
 from sahara.service.validations.edp import data_source_schema as ds_schema
 from sahara.swift import utils as su

@@ -38,7 +38,7 @@ class TestJobCreateValidation(u.ValidationTestCase):
             bad_req_i=(1, "VALIDATION_ERROR",
                        "type: 'Jar' is not one of " + str(edp.JOB_TYPES_ALL)))

-    @mock.patch('sahara.service.edp.api.get_job_binary')
+    @mock.patch('sahara.service.api.v11.get_job_binary')
     def test_check_binaries(self, get_job_binary):
         get_job_binary.return_value = "value"
         j._check_binaries(["one", "two"])

@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from sahara.service import api
+from sahara.service.api import v10 as api
 from sahara.service.validations.edp import job_binary as b
 from sahara.service.validations.edp import job_binary_schema as b_s
 from sahara.swift import utils as su

@@ -15,7 +15,7 @@

 import mock

-from sahara.service import api
+from sahara.service.api import v10 as api
 from sahara.service.validations.edp import job_binary_internal as jb
 from sahara.service.validations.edp import job_binary_internal_schema as jbs
 from sahara.tests.unit.service.validation import utils as u

@@ -23,7 +23,7 @@ import testtools

 from sahara import exceptions as ex
 from sahara import main
-from sahara.service import api
+from sahara.service.api import v10 as api
 from sahara.service.validations.edp import job_execution as je
 from sahara.service.validations.edp import job_execution_schema as je_schema
 from sahara.tests.unit.service.validation import utils as u

@@ -18,7 +18,7 @@ import six
 import testtools

 from sahara import exceptions
-from sahara.service import api
+from sahara.service.api import v10 as api
 from sahara.service.validations import clusters as c
 from sahara.service.validations import clusters_schema as c_schema
 from sahara.tests.unit import base

@@ -27,7 +27,7 @@ class TestClusterDeleteValidation(u.ValidationTestCase):
         super(TestClusterDeleteValidation, self).setUp()
         self.setup_context(tenant_id='tenant1')

-    @mock.patch('sahara.service.api.get_cluster')
+    @mock.patch('sahara.service.api.v10.get_cluster')
     def test_cluster_delete_when_protected(self, get_cluster_p):
         cluster = tu.create_cluster("cluster1", "tenant1", "fake",
                                     "0.1", ['ng1'], is_protected=True)
@@ -40,7 +40,7 @@ class TestClusterDeleteValidation(u.ValidationTestCase):
                 self.assert_protected_resource_exception(e)
                 raise e

-    @mock.patch('sahara.service.api.get_cluster')
+    @mock.patch('sahara.service.api.v10.get_cluster')
     def test_public_cluster_delete_from_another_tenant(self, get_cluster_p):
         cluster = tu.create_cluster("cluster1", "tenant2", "fake",
                                     "0.1", ['ng1'], is_public=True)

@@ -19,7 +19,7 @@ import testtools

 from sahara import exceptions as ex
 from sahara.plugins.fake import plugin
-from sahara.service import api
+from sahara.service.api import v10 as api
 import sahara.service.validation as v
 from sahara.service.validations import clusters_scaling as c_s
 from sahara.service.validations import clusters_schema as c_schema
@@ -43,7 +43,7 @@ class TestScalingValidation(u.ValidationTestCase):
             " detected: ['a']")
         self.setup_context(tenant_id='tenant1')

-    @mock.patch('sahara.service.api.get_cluster')
+    @mock.patch('sahara.service.api.v10.get_cluster')
     @mock.patch('sahara.plugins.base.PluginManager.get_plugin')
     def _assert_check_scaling(self,
                               get_plugin_p=None,

@@ -17,7 +17,7 @@ import uuid

 import mock

-from sahara.service import api
+from sahara.service.api import v10 as api
 from sahara.service.validations import cluster_template_schema as ct_schema
 from sahara.service.validations import cluster_templates as ct
 from sahara.tests.unit.service.validation import utils as u

@@ -17,7 +17,7 @@ import copy

 import mock

-from sahara.service import api
+from sahara.service.api import v10 as api
 from sahara.service.validations import cluster_template_schema as ct_schema
 from sahara.tests.unit.service.validation import utils as u


@@ -18,7 +18,7 @@ import mock
 import testtools

 from sahara import exceptions as ex
-from sahara.service import api
+from sahara.service.api import v10 as api
 from sahara.service.health import verification_base
 from sahara.service.validations import clusters as c_val
 from sahara.service.validations import clusters_schema as c_schema
@@ -68,7 +68,7 @@ class TestClusterUpdateValidation(u.ValidationTestCase):
                 "('id' was unexpected)")
         )

-    @mock.patch('sahara.service.api.get_cluster')
+    @mock.patch('sahara.service.api.v10.get_cluster')
     def test_cluster_update_when_protected(self, get_cluster_p):
         cluster = tu.create_cluster("cluster1", "tenant_1", "fake",
                                     "0.1", ['ng1'], is_protected=True)
@@ -86,7 +86,7 @@ class TestClusterUpdateValidation(u.ValidationTestCase):
             c_val.check_cluster_update(
                 cluster.id, {'is_protected': False, 'name': 'new'})

-    @mock.patch('sahara.service.api.get_cluster')
+    @mock.patch('sahara.service.api.v10.get_cluster')
     def test_public_cluster_update_from_another_tenant(self, get_cluster_p):
         cluster = tu.create_cluster("cluster1", "tenant_2", "fake",
                                     "0.1", ['ng1'], is_public=True)

@@ -15,7 +15,7 @@

 import mock

-from sahara.service import api
+from sahara.service.api import v10 as api
 from sahara.service.validations import node_group_template_schema as ngt_schema
 from sahara.service.validations import node_group_templates as nt
 from sahara.tests.unit.service.validation import utils as u

@@ -15,7 +15,7 @@

 import copy

-from sahara.service import api
+from sahara.service.api import v10 as api
 from sahara.service.validations import node_group_template_schema as nt
 from sahara.tests.unit.service.validation import utils as u


@@ -121,24 +121,24 @@ def _get_security_groups_list():


 def start_patch(patch_templates=True):
-    get_clusters_p = mock.patch("sahara.service.api.get_clusters")
-    get_cluster_p = mock.patch("sahara.service.api.get_cluster")
+    get_clusters_p = mock.patch("sahara.service.api.v10.get_clusters")
+    get_cluster_p = mock.patch("sahara.service.api.v10.get_cluster")
     if patch_templates:
         get_ng_templates_p = mock.patch(
-            "sahara.service.api.get_node_group_templates")
+            "sahara.service.api.v10.get_node_group_templates")
         get_ng_template_p = mock.patch(
-            "sahara.service.api.get_node_group_template")
+            "sahara.service.api.v10.get_node_group_template")
     if patch_templates:
         get_cl_templates_p = mock.patch(
-            "sahara.service.api.get_cluster_templates")
+            "sahara.service.api.v10.get_cluster_templates")
         get_cl_template_p = mock.patch(
-            "sahara.service.api.get_cluster_template")
+            "sahara.service.api.v10.get_cluster_template")
     nova_p = mock.patch("sahara.utils.openstack.nova.client")
     heat_p = mock.patch("sahara.utils.openstack.heat.client")
     cinder_p = mock.patch("sahara.utils.openstack.cinder.client")
     cinder_exists_p = mock.patch(
         "sahara.utils.openstack.cinder.check_cinder_exists")
-    get_image_p = mock.patch("sahara.service.api.get_image")
+    get_image_p = mock.patch("sahara.service.api.v10.get_image")

     get_image = get_image_p.start()
     get_clusters = get_clusters_p.start()