Adding shared and protected resources support
Adds support for resources that are shared across tenants and protected from modification. Implemented for clusters, cluster templates, node group templates, data sources, job executions, jobs, job binaries and job binary internals.

Changes:
* Added validation checks to service.validations.acl
* Changed validation schemas to support the "is_public" and "is_protected" fields
* Added validation of the "is_protected" field for the update (scale/cancel) and delete methods of the corresponding resources
* Extended the output of get and list methods with "is_public" resources
* Added unit tests for "is_public" and "is_protected" resources

Change-Id: I1a3cb14b8de70256e6aa27312dde341e85fc376c
Partially-Implements: blueprint shared-protected-resources
parent f084139cd7
commit c3b1c08a57
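As a rough usage sketch (not part of the patch; FakeResource and FakeContext below are made-up stand-ins), the acl helpers introduced further down behave as follows: an update of a protected resource is refused unless the same request also clears the flag, and writes from a tenant other than the owner are refused even when the resource is public:

    # Illustrative sketch only: exercising the new acl checks with stand-in objects.
    from sahara.service.validations import acl


    class FakeResource(object):
        # minimal stand-in for a DB object; only the attributes the checks read
        id = '42'
        tenant_id = 'tenant_1'
        is_protected = True


    class FakeContext(object):
        tenant_id = 'tenant_2'


    resource = FakeResource()

    # Refused: the resource is protected and the update does not clear the flag.
    try:
        acl.check_protected_from_update(resource, {'name': 'renamed'})
    except Exception as e:
        print(e)

    # Allowed: the same update that also sets "is_protected" to False.
    acl.check_protected_from_update(resource, {'name': 'renamed',
                                               'is_protected': False})

    # Refused: the caller's tenant differs from the resource's tenant.
    try:
        acl.check_tenant_for_update(FakeContext(), resource)
    except Exception as e:
        print(e)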
@@ -87,6 +87,7 @@ def clusters_update(cluster_id, data):
 @rest.delete('/clusters/<cluster_id>')
 @acl.enforce("data-processing:clusters:delete")
 @v.check_exists(api.get_cluster, 'cluster_id')
+@v.validate(None, v_c.check_cluster_delete)
 def clusters_delete(cluster_id):
     api.terminate_cluster(cluster_id)
     return u.render()

@@ -63,6 +63,7 @@ def job_executions_status(job_execution_id):
 @rest.get('/job-executions/<job_execution_id>/cancel')
 @acl.enforce("data-processing:job-executions:cancel")
 @v.check_exists(api.get_job_execution, id='job_execution_id')
+@v.validate(None, v_j_e.check_job_execution_cancel)
 def job_executions_cancel(job_execution_id):
     return u.to_wrapped_dict(api.cancel_job_execution, job_execution_id)

@@ -78,6 +79,7 @@ def job_executions_update(job_execution_id, data):
 @rest.delete('/job-executions/<job_execution_id>')
 @acl.enforce("data-processing:job-executions:delete")
 @v.check_exists(api.get_job_execution, id='job_execution_id')
+@v.validate(None, v_j_e.check_job_execution_delete)
 def job_executions_delete(job_execution_id):
     api.delete_job_execution(job_execution_id)
     return u.render()

@@ -180,6 +180,8 @@ class ConductorManager(db_base.Base):
         del c_tmpl['created_at']
         del c_tmpl['updated_at']
         del c_tmpl['id']
+        del c_tmpl['is_public']
+        del c_tmpl['is_protected']

         # updating with cluster_template values
         merged_values.update(c_tmpl)

@@ -516,7 +518,7 @@ class ConductorManager(db_base.Base):
                                                    job_binary_internal_id)

     def job_binary_internal_update(self, context, id, values):
-        """Updates a Job from the values dictionary."""
+        """Updates a JobBinaryInternal from the values dictionary."""
         return self.db.job_binary_internal_update(context, id, values)

     # Events ops

@@ -29,6 +29,7 @@ from sahara.db.sqlalchemy import models as m
 from sahara import exceptions as ex
 from sahara.i18n import _
 from sahara.i18n import _LW
+from sahara.service.validations import acl as validate


 LOG = logging.getLogger(__name__)

@@ -83,7 +84,9 @@ def model_query(model, context, session=None, project_only=True):
     query = session.query(model)

     if project_only and not context.is_admin:
-        query = query.filter_by(tenant_id=context.tenant_id)
+        query = query.filter(
+            (model.tenant_id == context.tenant_id) |
+            getattr(model, 'is_public', False))

     return query

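In effect, project-scoped queries issued by non-admin users now also match rows that other tenants flagged as public, while models without an is_public column keep the old tenant-only behaviour via the getattr fallback. A minimal sketch of the resulting filter (model, session and context are assumed to be the same objects used above):

    # Roughly: WHERE tenant_id = :caller_tenant OR is_public = true
    # getattr(model, 'is_public', False) degrades gracefully for models
    # that do not define an is_public column.
    query = session.query(model).filter(
        (model.tenant_id == context.tenant_id) |
        getattr(model, 'is_public', False))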
@@ -241,6 +244,10 @@ def cluster_update(context, cluster_id, values):
             if cluster is None:
                 raise ex.NotFoundException(cluster_id,
                                            _("Cluster id '%s' not found!"))
+
+            validate.check_tenant_for_update(context, cluster)
+            validate.check_protected_from_update(cluster, values)
+
             cluster.update(values)
     except db_exc.DBDuplicateEntry as e:
         raise ex.DBDuplicateEntry(

@@ -441,6 +448,9 @@ def cluster_template_destroy(context, cluster_template_id,
                     _("Cluster template id '%s' "
                       "is a default template") % cluster_template.id)

+        validate.check_tenant_for_delete(context, cluster_template)
+        validate.check_protected_from_delete(cluster_template)
+
         session.delete(cluster_template)

@@ -469,6 +479,9 @@ def cluster_template_update(context, values, ignore_default=False):
                     "It is a default template.")
                 )

+            validate.check_tenant_for_update(context, cluster_template)
+            validate.check_protected_from_update(cluster_template, values)
+
             if len(cluster_template.clusters) > 0:
                 raise ex.UpdateFailedException(
                     cluster_template_id,

@@ -549,6 +562,9 @@ def node_group_template_destroy(context, node_group_template_id,
                     _("Node group template id '%s' "
                       "is a default template") % node_group_template_id)

+        validate.check_tenant_for_delete(context, node_group_template)
+        validate.check_protected_from_delete(node_group_template)
+
         session.delete(node_group_template)

@@ -568,6 +584,9 @@ def node_group_template_update(context, values, ignore_default=False):
                     "It is a default template.")
                 )

+            validate.check_tenant_for_update(context, ngt)
+            validate.check_protected_from_update(ngt, values)
+
             # Check to see that the node group template to be updated is not in
             # use by an existing cluster.
             for template_relationship in ngt.templates_relations:

@@ -649,6 +668,10 @@ def data_source_destroy(context, data_source_id):
                 raise ex.NotFoundException(
                     data_source_id,
                     _("Data Source id '%s' not found!"))
+
+            validate.check_tenant_for_delete(context, data_source)
+            validate.check_protected_from_delete(data_source)
+
             session.delete(data_source)
     except db_exc.DBError as e:
         msg = ("foreign key constraint" in six.text_type(e) and

@@ -665,16 +688,21 @@ def data_source_update(context, values):
             if not data_source:
                 raise ex.NotFoundException(
                     ds_id, _("DataSource id '%s' not found"))
-            else:
-                jobs = job_execution_get_all(context)
-                pending_jobs = [job for job in jobs if
-                                job.info["status"] == "PENDING"]
-                for job in pending_jobs:
-                    if job.data_source_urls:
-                        if ds_id in job.data_source_urls:
-                            raise ex.UpdateFailedException(
-                                _("DataSource is used in a "
-                                  "PENDING Job and can not be updated."))
+
+            validate.check_tenant_for_update(context, data_source)
+            validate.check_protected_from_update(data_source, values)
+
+            jobs = job_execution_get_all(context)
+            pending_jobs = [job for job in jobs if
+                            job.info["status"] == "PENDING"]
+
+            for job in pending_jobs:
+                if job.data_source_urls:
+                    if ds_id in job.data_source_urls:
+                        raise ex.UpdateFailedException(
+                            _("DataSource is used in a "
+                              "PENDING Job and can not be updated."))

             data_source.update(values)
     except db_exc.DBDuplicateEntry as e:
         raise ex.DBDuplicateEntry(

@@ -817,6 +845,12 @@ def job_execution_update(context, job_execution_id, values):
             if not job_ex:
                 raise ex.NotFoundException(job_execution_id,
                                            _("JobExecution id '%s' not found!"))
+
+            # Skip this check for periodic tasks
+            if context.tenant_id:
+                validate.check_tenant_for_update(context, job_ex)
+                validate.check_protected_from_update(job_ex, values)
+
             job_ex.update(values)
             session.add(job_ex)

@@ -900,13 +934,16 @@ def job_create(context, values):

 def job_update(context, job_id, values):
     session = get_session()

     try:
         with session.begin():
             job = _job_get(context, session, job_id)
             if not job:
                 raise ex.NotFoundException(job_id,
                                            _("Job id '%s' not found!"))
+
+            validate.check_tenant_for_update(context, job)
+            validate.check_protected_from_update(job, values)
+
             job.update(values)
             session.add(job)
     except db_exc.DBDuplicateEntry as e:

@@ -924,6 +961,10 @@ def job_destroy(context, job_id):
             if not job:
                 raise ex.NotFoundException(job_id,
                                            _("Job id '%s' not found!"))
+
+            validate.check_tenant_for_delete(context, job)
+            validate.check_protected_from_delete(job)
+
             session.delete(job)
     except db_exc.DBError as e:
         msg = ("foreign key constraint" in six.text_type(e) and

@@ -984,6 +1025,10 @@ def job_binary_update(context, values):
             if not jb:
                 raise ex.NotFoundException(
                     jb_id, _("JobBinary id '%s' not found"))
+
+            validate.check_tenant_for_update(context, jb)
+            validate.check_protected_from_update(jb, values)
+
             # We do not want to update the url for internal binaries
             new_url = values.get("url", None)
             if new_url and "internal-db://" in jb["url"]:

@@ -1031,6 +1076,9 @@ def job_binary_destroy(context, job_binary_id):
                 raise ex.NotFoundException(job_binary_id,
                                            _("JobBinary id '%s' not found!"))

+            validate.check_tenant_for_delete(context, job_binary)
+            validate.check_protected_from_delete(job_binary)
+
             if _check_job_binary_referenced(context, session, job_binary_id):
                 raise ex.DeletionFailed(
                     _("JobBinary is referenced and cannot be deleted"))

@@ -1118,6 +1166,9 @@ def job_binary_internal_destroy(context, job_binary_internal_id):
                     job_binary_internal_id,
                     _("JobBinaryInternal id '%s' not found!"))

+            validate.check_tenant_for_delete(context, job_binary_internal)
+            validate.check_protected_from_delete(job_binary_internal)
+
             session.delete(job_binary_internal)

@@ -1132,6 +1183,10 @@ def job_binary_internal_update(context, job_binary_internal_id, values):
                 raise ex.NotFoundException(
                     job_binary_internal_id,
                     _("JobBinaryInternal id '%s' not found!"))
+
+            validate.check_tenant_for_update(context, j_b_i)
+            validate.check_protected_from_update(j_b_i, values)
+
             j_b_i.update(values)
     except db_exc.DBDuplicateEntry as e:
         raise ex.DBDuplicateEntry(

sahara/service/validations/acl.py (new file, 52 lines)
@@ -0,0 +1,52 @@
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from sahara import exceptions as ex
from sahara.i18n import _


def check_tenant_for_delete(context, object):
    if object.tenant_id != context.tenant_id:
        raise ex.DeletionFailed(
            _("{object} with id '{id}' could not be deleted because "
              "it wasn't created in this tenant").format(
                object=type(object).__name__, id=object.id))


def check_tenant_for_update(context, object):
    if object.tenant_id != context.tenant_id:
        raise ex.UpdateFailedException(
            object.id,
            _("{object} with id '%s' could not be updated because "
              "it wasn't created in this tenant").format(
                object=type(object).__name__))


def check_protected_from_delete(object):
    if object.is_protected:
        raise ex.DeletionFailed(
            _("{object} with id '{id}' could not be deleted because "
              "it's marked as protected").format(
                object=type(object).__name__, id=object.id))


def check_protected_from_update(object, data):
    if object.is_protected and data.get('is_protected', True):
        raise ex.UpdateFailedException(
            object.id,
            _("{object} with id '%s' could not be updated "
              "because it's marked as protected").format(
                object=type(object).__name__))

@@ -93,6 +93,12 @@ CLUSTER_TEMPLATE_SCHEMA = {
         "shares": copy.deepcopy(shares.SHARE_SCHEMA),
         "use_autoconfig": {
             "type": ["boolean", "null"],
+        },
+        "is_public": {
+            "type": ["boolean", "null"],
+        },
+        "is_protected": {
+            "type": ["boolean", "null"],
         }
     },
     "additionalProperties": False,

@@ -15,9 +15,11 @@

 from oslo_config import cfg

+from sahara import context
 import sahara.exceptions as ex
 from sahara.i18n import _
 import sahara.service.api as api
+from sahara.service.validations import acl
 import sahara.service.validations.base as b

|
|||||||
return cluster_template[field]
|
return cluster_template[field]
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def check_cluster_delete(cluster_id, **kwargs):
|
||||||
|
cluster = api.get_cluster(cluster_id)
|
||||||
|
|
||||||
|
acl.check_tenant_for_delete(context.current(), cluster)
|
||||||
|
acl.check_protected_from_delete(cluster)
|
||||||
|
@@ -13,19 +13,28 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.


+from sahara import context
 import sahara.exceptions as ex
 from sahara.i18n import _
 import sahara.plugins.base as plugin_base
 import sahara.service.api as api
+from sahara.service.validations import acl
 import sahara.service.validations.base as b
 from sahara.utils import cluster as c_u


 def check_cluster_scaling(data, cluster_id, **kwargs):
+    ctx = context.current()
     cluster = api.get_cluster(id=cluster_id)

     if cluster is None:
         raise ex.NotFoundException(
             {'id': cluster_id}, _('Object with %s not found'))

+    acl.check_tenant_for_update(ctx, cluster)
+    acl.check_protected_from_update(cluster, data)
+
     cluster_engine = cluster.sahara_info.get(
         'infrastructure_engine') if cluster.sahara_info else None

|
|||||||
"minLength": 1,
|
"minLength": 1,
|
||||||
"maxLength": 50,
|
"maxLength": 50,
|
||||||
"format": "valid_name_hostname",
|
"format": "valid_name_hostname",
|
||||||
|
},
|
||||||
|
"is_public": {
|
||||||
|
"type": ["boolean", "null"],
|
||||||
|
},
|
||||||
|
"is_protected": {
|
||||||
|
"type": ["boolean", "null"],
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"additionalProperties": False,
|
"additionalProperties": False,
|
||||||
|
@ -35,6 +35,12 @@ DATA_SOURCE_SCHEMA = {
|
|||||||
},
|
},
|
||||||
"credentials": {
|
"credentials": {
|
||||||
"type": "object"
|
"type": "object"
|
||||||
|
},
|
||||||
|
"is_public": {
|
||||||
|
"type": ["boolean", "null"],
|
||||||
|
},
|
||||||
|
"is_protected": {
|
||||||
|
"type": ["boolean", "null"],
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"additionalProperties": False,
|
"additionalProperties": False,
|
||||||
|
@@ -23,6 +23,12 @@ JOB_BINARY_UPDATE_SCHEMA = {
             "maxLength": 50,
             "format": "valid_name"
         },
+        "is_public": {
+            "type": ["boolean", "null"],
+        },
+        "is_protected": {
+            "type": ["boolean", "null"],
+        }
     },
     "additionalProperties": False,
     "required": []

@@ -31,6 +31,12 @@ JOB_BINARY_SCHEMA = {
             "type": "string",
             "format": "valid_job_location"
         },
+        "is_public": {
+            "type": ["boolean", "null"],
+        },
+        "is_protected": {
+            "type": ["boolean", "null"],
+        },
         # extra is simple_config for now because we may need not only
         # user-password it the case of external storage
         "extra": {

@@ -18,6 +18,7 @@ from sahara import context
 from sahara import exceptions as ex
 from sahara.i18n import _
 from sahara.plugins import base as plugin_base
+from sahara.service.validations import acl
 import sahara.service.validations.edp.base as b
 import sahara.service.validations.edp.job_interface as j_i

|
|||||||
b.check_data_source_exists(data['output_id'])
|
b.check_data_source_exists(data['output_id'])
|
||||||
|
|
||||||
b.check_data_sources_are_different(data['input_id'], data['output_id'])
|
b.check_data_sources_are_different(data['input_id'], data['output_id'])
|
||||||
|
|
||||||
|
|
||||||
|
def check_job_execution_cancel(job_execution_id, **kwargs):
|
||||||
|
ctx = context.current()
|
||||||
|
je = conductor.job_execution_get(ctx, job_execution_id)
|
||||||
|
|
||||||
|
if je.tenant_id != ctx.tenant_id:
|
||||||
|
raise ex.CancelingFailed(
|
||||||
|
_("Job execution with id '%s' cannot be canceled "
|
||||||
|
"because it wasn't created in this tenant")
|
||||||
|
% job_execution_id)
|
||||||
|
|
||||||
|
if je.is_protected:
|
||||||
|
raise ex.CancelingFailed(
|
||||||
|
_("Job Execution with id '%s' cannot be canceled "
|
||||||
|
"because it's marked as protected") % job_execution_id)
|
||||||
|
|
||||||
|
|
||||||
|
def check_job_execution_delete(job_execution_id, **kwargs):
|
||||||
|
ctx = context.current()
|
||||||
|
je = conductor.job_execution_get(ctx, job_execution_id)
|
||||||
|
|
||||||
|
acl.check_tenant_for_delete(ctx, je)
|
||||||
|
acl.check_protected_from_delete(je)
|
||||||
|
@@ -36,6 +36,12 @@ JOB_EXEC_SCHEMA = {
             "type": "simple_config",
         },
         "job_configs": b.job_configs,
+        "is_public": {
+            "type": ["boolean", "null"],
+        },
+        "is_protected": {
+            "type": ["boolean", "null"],
+        }
     },
     "additionalProperties": False,
     "required": [

|
|||||||
|
|
||||||
JOB_EXEC_UPDATE_SCHEMA = {
|
JOB_EXEC_UPDATE_SCHEMA = {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {},
|
"properties": {
|
||||||
|
"is_public": {
|
||||||
|
"type": ["boolean", "null"],
|
||||||
|
},
|
||||||
|
"is_protected": {
|
||||||
|
"type": ["boolean", "null"],
|
||||||
|
}
|
||||||
|
},
|
||||||
"additionalProperties": False,
|
"additionalProperties": False,
|
||||||
"required": []
|
"required": []
|
||||||
}
|
}
|
||||||
|
@@ -53,7 +53,13 @@ JOB_SCHEMA = {
         "streaming": {
             "type": "boolean"
         },
-        "interface": j_i.INTERFACE_ARGUMENT_SCHEMA
+        "interface": j_i.INTERFACE_ARGUMENT_SCHEMA,
+        "is_public": {
+            "type": ["boolean", "null"],
+        },
+        "is_protected": {
+            "type": ["boolean", "null"],
+        }
     },
     "additionalProperties": False,
     "required": [

@@ -74,6 +80,12 @@ JOB_UPDATE_SCHEMA = {
         },
         "description": {
             "type": ["string", "null"]
+        },
+        "is_public": {
+            "type": ["boolean", "null"],
+        },
+        "is_protected": {
+            "type": ["boolean", "null"],
         }
     },
     "additionalProperties": False,

@@ -92,6 +92,12 @@ NODE_GROUP_TEMPLATE_SCHEMA = {
         "shares": copy.deepcopy(shares.SHARE_SCHEMA),
         "use_autoconfig": {
             "type": ["boolean", "null"]
+        },
+        "is_public": {
+            "type": ["boolean", "null"],
+        },
+        "is_protected": {
+            "type": ["boolean", "null"],
         }
     },
     "additionalProperties": False,

@@ -44,3 +44,9 @@ class ConductorManagerTestCase(base.SaharaWithDbTestCase):
             self.assertEqual(self._results[idx], check_val,
                              message="Check '%s' failed" % idx)
         super(ConductorManagerTestCase, self).tearDown()
+
+    def assert_protected_resource_exception(self, ex):
+        self.assertIn("marked as protected", str(ex))
+
+    def assert_created_in_another_tenant_exception(self, ex):
+        self.assertIn("wasn't created in this tenant", str(ex))

@@ -73,7 +73,7 @@ class ClusterTest(test_base.ConductorManagerTestCase):
             lambda: manager.INSTANCE_DEFAULTS,
         ], *args, **kwargs)

-    def test_cluster_create_list_delete(self):
+    def test_cluster_create_list_update_delete(self):
         ctx = context.ctx()
         cluster_db_obj = self.api.cluster_create(ctx, SAMPLE_CLUSTER)
         self.assertIsInstance(cluster_db_obj, dict)

@@ -368,3 +368,35 @@ class ClusterTest(test_base.ConductorManagerTestCase):
         self.assertRaises(sa_exc.InvalidRequestError,
                           self.api.cluster_get_all,
                           ctx, **{'badfield': 'somevalue'})
+
+    def test_cluster_update_when_protected(self):
+        ctx = context.ctx()
+        sample = copy.deepcopy(SAMPLE_CLUSTER)
+        sample['is_protected'] = True
+        cl = self.api.cluster_create(ctx, sample)
+        cl_id = cl["id"]
+
+        with testtools.ExpectedException(ex.UpdateFailedException):
+            try:
+                self.api.cluster_update(ctx, cl_id, {"name": "cluster"})
+            except ex.UpdateFailedException as e:
+                self.assert_protected_resource_exception(e)
+                raise e
+
+        self.api.cluster_update(ctx, cl_id, {"name": "cluster",
+                                             "is_protected": False})
+
+    def test_public_cluster_update_from_another_tenant(self):
+        ctx = context.ctx()
+        sample = copy.deepcopy(SAMPLE_CLUSTER)
+        sample['is_public'] = True
+        cl = self.api.cluster_create(ctx, sample)
+        cl_id = cl["id"]
+        ctx.tenant_id = 'tenant_2'
+
+        with testtools.ExpectedException(ex.UpdateFailedException):
+            try:
+                self.api.cluster_update(ctx, cl_id, {"name": "cluster"})
+            except ex.UpdateFailedException as e:
+                self.assert_created_in_another_tenant_exception(e)
+                raise e

@@ -35,7 +35,9 @@ SAMPLE_DATA_SOURCE = {
     "credentials": {
         "user": "test",
         "password": "123"
-    }
+    },
+    "is_public": False,
+    "is_protected": False
 }

 SAMPLE_JOB = {

@@ -43,7 +45,9 @@ SAMPLE_JOB = {
     "name": "job_test",
     "description": "test_desc",
     "type": edp.JOB_TYPE_PIG,
-    "mains": []
+    "mains": [],
+    "is_public": False,
+    "is_protected": False
 }

 SAMPLE_JOB_EXECUTION = {

@@ -53,7 +57,9 @@ SAMPLE_JOB_EXECUTION = {
     "input_id": "undefined",
     "output_id": "undefined",
     "start_time": datetime.datetime.now(),
-    "cluster_id": None
+    "cluster_id": None,
+    "is_public": False,
+    "is_protected": False
 }

 SAMPLE_CONF_JOB_EXECUTION = {

@@ -75,7 +81,9 @@ BINARY_DATA = b"vU}\x97\x1c\xdf\xa686\x08\xf2\tf\x0b\xb1}"
 SAMPLE_JOB_BINARY_INTERNAL = {
     "tenant_id": "test_tenant",
     "name": "job_test",
-    "data": BINARY_DATA
+    "data": BINARY_DATA,
+    "is_public": False,
+    "is_protected": False
 }

@@ -84,6 +92,8 @@ SAMPLE_JOB_BINARY = {
     "name": "job_binary_test",
     "description": "test_dec",
     "url": "internal-db://test_binary",
+    "is_public": False,
+    "is_protected": False
 }

 SAMPLE_JOB_BINARY_UPDATE = {

@@ -261,6 +271,52 @@ class DataSourceTest(test_base.ConductorManagerTestCase):
         new_info = {"status": edp.JOB_STATUS_PENDING}
         self.api.job_execution_update(context, job_ex_id, {'info': new_info})

+    def test_ds_update_delete_when_protected(self):
+        ctx = context.ctx()
+        sample = copy.deepcopy(SAMPLE_DATA_SOURCE)
+        sample['is_protected'] = True
+        ds = self.api.data_source_create(ctx, sample)
+        ds_id = ds["id"]
+
+        with testtools.ExpectedException(ex.UpdateFailedException):
+            try:
+                self.api.data_source_update(ctx, ds_id, {"name": "ds"})
+            except ex.UpdateFailedException as e:
+                self.assert_protected_resource_exception(e)
+                raise e
+
+        with testtools.ExpectedException(ex.DeletionFailed):
+            try:
+                self.api.data_source_destroy(ctx, ds_id)
+            except ex.DeletionFailed as e:
+                self.assert_protected_resource_exception(e)
+                raise e
+
+        self.api.data_source_update(ctx, ds_id,
+                                    {"name": "ds", "is_protected": False})
+
+    def test_public_ds_update_delete_from_another_tenant(self):
+        ctx = context.ctx()
+        sample = copy.deepcopy(SAMPLE_DATA_SOURCE)
+        sample['is_public'] = True
+        ds = self.api.data_source_create(ctx, sample)
+        ds_id = ds["id"]
+        ctx.tenant_id = 'tenant_2'
+
+        with testtools.ExpectedException(ex.UpdateFailedException):
+            try:
+                self.api.data_source_update(ctx, ds_id, {"name": "ds"})
+            except ex.UpdateFailedException as e:
+                self.assert_created_in_another_tenant_exception(e)
+                raise e
+
+        with testtools.ExpectedException(ex.DeletionFailed):
+            try:
+                self.api.data_source_destroy(ctx, ds_id)
+            except ex.DeletionFailed as e:
+                self.assert_created_in_another_tenant_exception(e)
+                raise e
+

 class JobExecutionTest(test_base.ConductorManagerTestCase):
     def test_crud_operation_create_list_delete_update(self):

@@ -476,6 +532,59 @@ class JobExecutionTest(test_base.ConductorManagerTestCase):
         lst = self.api.job_execution_get_all(ctx, **kwargs)
         self.assertEqual(0, len(lst))

+    def test_je_update_when_protected(self):
+        ctx = context.ctx()
+        job = self.api.job_create(ctx, SAMPLE_JOB)
+        ds_input = self.api.data_source_create(ctx, SAMPLE_DATA_SOURCE)
+        SAMPLE_DATA_OUTPUT = copy.deepcopy(SAMPLE_DATA_SOURCE)
+        SAMPLE_DATA_OUTPUT['name'] = 'output'
+        ds_output = self.api.data_source_create(ctx, SAMPLE_DATA_OUTPUT)
+
+        sample = copy.deepcopy(SAMPLE_JOB_EXECUTION)
+        sample['is_protected'] = True
+        sample['job_id'] = job['id']
+        sample['input_id'] = ds_input['id']
+        sample['output_id'] = ds_output['id']
+
+        je = self.api.job_execution_create(ctx, sample)
+        je_id = je["id"]
+
+        with testtools.ExpectedException(ex.UpdateFailedException):
+            try:
+                self.api.job_execution_update(ctx, je_id, {"is_public": True})
+            except ex.UpdateFailedException as e:
+                self.assert_protected_resource_exception(e)
+                raise e
+
+        self.api.job_execution_update(ctx, je_id, {"is_protected": False,
+                                                   "is_public": True})
+
+    def test_public_je_update_from_another_tenant(self):
+        ctx = context.ctx()
+        job = self.api.job_create(ctx, SAMPLE_JOB)
+        ds_input = self.api.data_source_create(ctx, SAMPLE_DATA_SOURCE)
+        SAMPLE_DATA_OUTPUT = copy.deepcopy(SAMPLE_DATA_SOURCE)
+        SAMPLE_DATA_OUTPUT['name'] = 'output'
+        ds_output = self.api.data_source_create(ctx, SAMPLE_DATA_OUTPUT)
+
+        sample = copy.deepcopy(SAMPLE_JOB_EXECUTION)
+        sample['is_public'] = True
+        sample['job_id'] = job['id']
+        sample['input_id'] = ds_input['id']
+        sample['output_id'] = ds_output['id']
+
+        je = self.api.job_execution_create(ctx, sample)
+        je_id = je["id"]
+
+        ctx.tenant_id = 'tenant_2'
+
+        with testtools.ExpectedException(ex.UpdateFailedException):
+            try:
+                self.api.job_execution_update(ctx, je_id, {"is_public": True})
+            except ex.UpdateFailedException as e:
+                self.assert_created_in_another_tenant_exception(e)
+                raise e
+

 class JobTest(test_base.ConductorManagerTestCase):
     def __init__(self, *args, **kwargs):

@@ -540,6 +649,52 @@ class JobTest(test_base.ConductorManagerTestCase):
                           self.api.job_get_all,
                           ctx, **{'badfield': 'somevalue'})

+    def test_job_update_delete_when_protected(self):
+        ctx = context.ctx()
+        sample = copy.deepcopy(SAMPLE_JOB)
+        sample['is_protected'] = True
+        job = self.api.job_create(ctx, sample)
+        job_id = job["id"]
+
+        with testtools.ExpectedException(ex.UpdateFailedException):
+            try:
+                self.api.job_update(ctx, job_id, {"name": "job"})
+            except ex.UpdateFailedException as e:
+                self.assert_protected_resource_exception(e)
+                raise e
+
+        with testtools.ExpectedException(ex.DeletionFailed):
+            try:
+                self.api.job_destroy(ctx, job_id)
+            except ex.DeletionFailed as e:
+                self.assert_protected_resource_exception(e)
+                raise e
+
+        self.api.job_update(ctx, job_id, {"name": "job",
+                                          "is_protected": False})
+
+    def test_public_job_update_delete_from_another_tenant(self):
+        ctx = context.ctx()
+        sample = copy.deepcopy(SAMPLE_JOB)
+        sample['is_public'] = True
+        job = self.api.job_create(ctx, sample)
+        job_id = job["id"]
+        ctx.tenant_id = 'tenant_2'
+
+        with testtools.ExpectedException(ex.UpdateFailedException):
+            try:
+                self.api.job_update(ctx, job_id, {"name": "job"})
+            except ex.UpdateFailedException as e:
+                self.assert_created_in_another_tenant_exception(e)
+                raise e
+
+        with testtools.ExpectedException(ex.DeletionFailed):
+            try:
+                self.api.job_destroy(ctx, job_id)
+            except ex.DeletionFailed as e:
+                self.assert_created_in_another_tenant_exception(e)
+                raise e
+

 class JobBinaryInternalTest(test_base.ConductorManagerTestCase):
     def __init__(self, *args, **kwargs):

@@ -634,6 +789,55 @@ class JobBinaryInternalTest(test_base.ConductorManagerTestCase):
                           self.api.job_binary_internal_get_all,
                           ctx, **{'badfield': 'junk'})

+    def test_jbi_update_delete_when_protected(self):
+        ctx = context.ctx()
+        sample = copy.deepcopy(SAMPLE_JOB_BINARY_INTERNAL)
+        sample['is_protected'] = True
+        jbi = self.api.job_binary_internal_create(ctx, sample)
+        jbi_id = jbi["id"]
+
+        with testtools.ExpectedException(ex.UpdateFailedException):
+            try:
+                self.api.job_binary_internal_update(ctx, jbi_id,
+                                                    {"name": "jbi"})
+            except ex.UpdateFailedException as e:
+                self.assert_protected_resource_exception(e)
+                raise e
+
+        with testtools.ExpectedException(ex.DeletionFailed):
+            try:
+                self.api.job_binary_internal_destroy(ctx, jbi_id)
+            except ex.DeletionFailed as e:
+                self.assert_protected_resource_exception(e)
+                raise e
+
+        self.api.job_binary_internal_update(ctx, jbi_id,
+                                            {"name": "jbi",
+                                             "is_protected": False})
+
+    def test_public_jbi_update_delete_from_another_tenant(self):
+        ctx = context.ctx()
+        sample = copy.deepcopy(SAMPLE_JOB_BINARY_INTERNAL)
+        sample['is_public'] = True
+        jbi = self.api.job_binary_internal_create(ctx, sample)
+        jbi_id = jbi["id"]
+        ctx.tenant_id = 'tenant_2'
+
+        with testtools.ExpectedException(ex.UpdateFailedException):
+            try:
+                self.api.job_binary_internal_update(ctx, jbi_id,
+                                                    {"name": "jbi"})
+            except ex.UpdateFailedException as e:
+                self.assert_created_in_another_tenant_exception(e)
+                raise e
+
+        with testtools.ExpectedException(ex.DeletionFailed):
+            try:
+                self.api.job_binary_internal_destroy(ctx, jbi_id)
+            except ex.DeletionFailed as e:
+                self.assert_created_in_another_tenant_exception(e)
+                raise e
+

 class JobBinaryTest(test_base.ConductorManagerTestCase):
     def __init__(self, *args, **kwargs):

|
|||||||
job_ex_id = lst[0]["id"]
|
job_ex_id = lst[0]["id"]
|
||||||
new_info = {"status": edp.JOB_STATUS_PENDING}
|
new_info = {"status": edp.JOB_STATUS_PENDING}
|
||||||
self.api.job_execution_update(ctx, job_ex_id, {"info": new_info})
|
self.api.job_execution_update(ctx, job_ex_id, {"info": new_info})
|
||||||
|
|
||||||
|
def test_jb_update_delete_when_protected(self):
|
||||||
|
ctx = context.ctx()
|
||||||
|
sample = copy.deepcopy(SAMPLE_JOB_BINARY)
|
||||||
|
sample['is_protected'] = True
|
||||||
|
jb = self.api.job_binary_create(ctx, sample)
|
||||||
|
jb_id = jb["id"]
|
||||||
|
|
||||||
|
with testtools.ExpectedException(ex.UpdateFailedException):
|
||||||
|
try:
|
||||||
|
self.api.job_binary_update(ctx, jb_id, {"name": "jb"})
|
||||||
|
except ex.UpdateFailedException as e:
|
||||||
|
self.assert_protected_resource_exception(e)
|
||||||
|
raise e
|
||||||
|
|
||||||
|
with testtools.ExpectedException(ex.DeletionFailed):
|
||||||
|
try:
|
||||||
|
self.api.job_binary_destroy(ctx, jb_id)
|
||||||
|
except ex.DeletionFailed as e:
|
||||||
|
self.assert_protected_resource_exception(e)
|
||||||
|
raise e
|
||||||
|
|
||||||
|
self.api.job_binary_update(ctx, jb_id, {"name": "jb",
|
||||||
|
"is_protected": False})
|
||||||
|
|
||||||
|
def test_public_jb_update_delete_from_another_tenant(self):
|
||||||
|
ctx = context.ctx()
|
||||||
|
sample = copy.deepcopy(SAMPLE_JOB_BINARY)
|
||||||
|
sample['is_public'] = True
|
||||||
|
jb = self.api.job_binary_create(ctx, sample)
|
||||||
|
jb_id = jb["id"]
|
||||||
|
ctx.tenant_id = 'tenant_2'
|
||||||
|
|
||||||
|
with testtools.ExpectedException(ex.UpdateFailedException):
|
||||||
|
try:
|
||||||
|
self.api.job_binary_update(ctx, jb_id, {"name": "jb"})
|
||||||
|
except ex.UpdateFailedException as e:
|
||||||
|
self.assert_created_in_another_tenant_exception(e)
|
||||||
|
raise e
|
||||||
|
|
||||||
|
with testtools.ExpectedException(ex.DeletionFailed):
|
||||||
|
try:
|
||||||
|
self.api.job_binary_destroy(ctx, jb_id)
|
||||||
|
except ex.DeletionFailed as e:
|
||||||
|
self.assert_created_in_another_tenant_exception(e)
|
||||||
|
raise e
|
||||||
|
@ -262,6 +262,47 @@ class NodeGroupTemplates(test_base.ConductorManagerTestCase):
|
|||||||
if value is None:
|
if value is None:
|
||||||
self.assertIsNone(updated_ngt[prop])
|
self.assertIsNone(updated_ngt[prop])
|
||||||
|
|
||||||
|
def test_ngt_update_delete_when_protected(self):
|
||||||
|
ctx = context.ctx()
|
||||||
|
sample = copy.deepcopy(SAMPLE_NGT)
|
||||||
|
sample['is_protected'] = True
|
||||||
|
ngt = self.api.node_group_template_create(ctx, sample)
|
||||||
|
ngt_id = ngt["id"]
|
||||||
|
|
||||||
|
with testtools.ExpectedException(ex.UpdateFailedException):
|
||||||
|
self.api.node_group_template_update(ctx, ngt_id,
|
||||||
|
{"name": "tmpl"})
|
||||||
|
|
||||||
|
with testtools.ExpectedException(ex.DeletionFailed):
|
||||||
|
self.api.node_group_template_destroy(ctx, ngt_id)
|
||||||
|
|
||||||
|
self.api.node_group_template_update(ctx, ngt_id,
|
||||||
|
{"name": "tmpl",
|
||||||
|
"is_protected": False})
|
||||||
|
|
||||||
|
def test_public_ngt_update_from_another_tenant(self):
|
||||||
|
ctx = context.ctx()
|
||||||
|
sample = copy.deepcopy(SAMPLE_NGT)
|
||||||
|
sample['is_public'] = True
|
||||||
|
ngt = self.api.node_group_template_create(ctx, sample)
|
||||||
|
ngt_id = ngt["id"]
|
||||||
|
ctx.tenant_id = 'tenant_2'
|
||||||
|
|
||||||
|
with testtools.ExpectedException(ex.UpdateFailedException):
|
||||||
|
self.api.node_group_template_update(ctx, ngt_id,
|
||||||
|
{"name": "tmpl"})
|
||||||
|
|
||||||
|
def test_public_ngt_delete_from_another_tenant(self):
|
||||||
|
ctx = context.ctx()
|
||||||
|
sample = copy.deepcopy(SAMPLE_NGT)
|
||||||
|
sample['is_public'] = True
|
||||||
|
ngt = self.api.node_group_template_create(ctx, sample)
|
||||||
|
ngt_id = ngt["id"]
|
||||||
|
ctx.tenant_id = 'tenant_2'
|
||||||
|
|
||||||
|
with testtools.ExpectedException(ex.DeletionFailed):
|
||||||
|
self.api.node_group_template_destroy(ctx, ngt_id)
|
||||||
|
|
||||||
|
|
||||||
class ClusterTemplates(test_base.ConductorManagerTestCase):
|
class ClusterTemplates(test_base.ConductorManagerTestCase):
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
@@ -459,3 +500,51 @@ class ClusterTemplates(test_base.ConductorManagerTestCase):
                 self.assertEqual([], updated_clt[prop])
             else:
                 self.assertIsNone(updated_clt[prop])
+
+    def test_clt_update_delete_when_protected(self):
+        ctx = context.ctx()
+        sample = copy.deepcopy(SAMPLE_CLT)
+        sample['is_protected'] = True
+        clt = self.api.cluster_template_create(ctx, sample)
+        clt_id = clt["id"]
+
+        with testtools.ExpectedException(ex.UpdateFailedException):
+            try:
+                self.api.cluster_template_update(ctx, clt_id, {"name": "tmpl"})
+            except ex.UpdateFailedException as e:
+                self.assert_protected_resource_exception(e)
+                raise e
+
+        with testtools.ExpectedException(ex.DeletionFailed):
+            try:
+                self.api.cluster_template_destroy(ctx, clt_id)
+            except ex.DeletionFailed as e:
+                self.assert_protected_resource_exception(e)
+                raise e
+
+        self.api.cluster_template_update(ctx, clt_id,
+                                         {"name": "tmpl",
+                                          "is_protected": False})
+
+    def test_public_clt_update_delete_from_another_tenant(self):
+        ctx = context.ctx()
+        sample = copy.deepcopy(SAMPLE_CLT)
+        sample['is_public'] = True
+        clt = self.api.cluster_template_create(ctx, sample)
+        clt_id = clt["id"]
+        ctx.tenant_id = 'tenant_2'
+
+        with testtools.ExpectedException(ex.UpdateFailedException):
+            try:
+                self.api.cluster_template_update(ctx, clt_id,
+                                                 {"name": "tmpl"})
+            except ex.UpdateFailedException as e:
+                self.assert_created_in_another_tenant_exception(e)
+                raise e
+
+        with testtools.ExpectedException(ex.DeletionFailed):
+            try:
+                self.api.cluster_template_destroy(ctx, clt_id)
+            except ex.DeletionFailed as e:
+                self.assert_created_in_another_tenant_exception(e)
+                raise e

@@ -36,6 +36,7 @@ class TemplateDeleteTestCase(base.ConductorManagerTestCase):

     def test_node_group_template_delete_by_id(self):
         self.logger.clear_log()
+        self.setup_context(tenant_id=None)
         ctx = context.ctx()
         t = self.api.node_group_template_create(ctx, c.SAMPLE_NGT)

@@ -53,6 +54,7 @@ class TemplateDeleteTestCase(base.ConductorManagerTestCase):

     def test_node_group_template_delete_by_id_skipped(self):
         self.logger.clear_log()
+        self.setup_context(tenant_id=None)
         ctx = context.ctx()
         template_values = copy.copy(c.SAMPLE_NGT)
         template_values["is_default"] = False

@@ -163,6 +165,7 @@ class TemplateDeleteTestCase(base.ConductorManagerTestCase):

     def test_cluster_template_delete_by_id(self):
         self.logger.clear_log()
+        self.setup_context(tenant_id=None)
         ctx = context.ctx()
         t = self.api.cluster_template_create(ctx, c.SAMPLE_CLT)

@@ -226,7 +226,9 @@ class TestJobUpdateValidation(u.ValidationTestCase):
     def test_job_update(self):
         data = {
             'name': 'job',
-            'description': 'very fast job'
+            'description': 'very fast job',
+            'is_public': False,
+            'is_protected': False
         }
         self._assert_types(data)

@@ -45,7 +45,9 @@ class TestJobBinaryInternalUpdateValidation(u.ValidationTestCase):

     def test_job_binary_internal_update_types(self):
         data = {
-            'name': 'jb'
+            'name': 'jb',
+            'is_public': False,
+            'is_protected': False
         }
         self._assert_types(data)

@@ -53,7 +55,9 @@ class TestJobBinaryInternalUpdateValidation(u.ValidationTestCase):
         self._assert_create_object_validation(data={'name': 'jb'})

         self._assert_create_object_validation(
-            data={'id': '1'},
+            data={'id': '1',
+                  'is_public': False,
+                  'is_protected': False},
             bad_req_i=(1, "VALIDATION_ERROR",
                        "Additional properties are not allowed "
                        "('id' was unexpected)"))

@@ -17,7 +17,9 @@ import uuid

 import mock
 import six
+import testtools

+from sahara import exceptions as ex
 from sahara import main
 from sahara.service import api
 from sahara.service.validations.edp import job_execution as je

@@ -260,10 +262,63 @@ class TestJobExecUpdateValidation(u.ValidationTestCase):
         self.scheme = je_schema.JOB_EXEC_UPDATE_SCHEMA

     def test_job_execution_update_types(self):
-        data = {}
+        data = {
+            'is_public': False,
+            'is_protected': False
+        }
         self._assert_types(data)

     def test_job_execution_update_nothing_required(self):
         self._assert_create_object_validation(
-            data={}
+            data={
+                'is_public': False,
+                'is_protected': False
+            }
         )
+
+
+class TestJobExecutionCancelDeleteValidation(u.ValidationTestCase):
+    def setUp(self):
+        super(TestJobExecutionCancelDeleteValidation, self).setUp()
+        self.setup_context(tenant_id='tenant1')
+
+    @mock.patch('sahara.conductor.api.LocalApi.job_execution_get')
+    def test_je_cancel_delete_when_protected(self, get_je_p):
+        job_exec = mock.Mock(id='123', tenant_id='tenant1', is_protected=True)
+        get_je_p.return_value = job_exec
+
+        with testtools.ExpectedException(ex.CancelingFailed):
+            try:
+                je.check_job_execution_cancel(job_exec)
+            except ex.CancelingFailed as e:
+                self.assert_protected_resource_exception(e)
+                raise e
+
+        with testtools.ExpectedException(ex.DeletionFailed):
+            try:
+                je.check_job_execution_delete(job_exec)
+            except ex.DeletionFailed as e:
+                self.assert_protected_resource_exception(e)
+                raise e
+
+    @mock.patch('sahara.conductor.api.LocalApi.job_execution_get')
+    def test_public_je_cancel_delete_from_another_tenant(self, get_je_p):
+        job_exec = mock.Mock(id='123', tenant_id='tenant2', is_protected=False,
+                             is_public=True)
+        get_je_p.return_value = job_exec
+
+        with testtools.ExpectedException(ex.CancelingFailed):
+            try:
+                je.check_job_execution_cancel(job_exec)
+            except ex.CancelingFailed as e:
+                self.assert_created_in_another_tenant_exception(e)
+                raise e
+
+        with testtools.ExpectedException(ex.DeletionFailed):
+            try:
+                je.check_job_execution_delete(job_exec)
+            except ex.DeletionFailed as e:
+                self.assert_created_in_another_tenant_exception(e)
+                raise e

@@ -0,0 +1,54 @@ (new file)
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
import testtools

from sahara import exceptions as ex
from sahara.service.validations import clusters as c_val
from sahara.tests.unit.service.validation import utils as u
from sahara.tests.unit import testutils as tu


class TestClusterDeleteValidation(u.ValidationTestCase):
    def setUp(self):
        super(TestClusterDeleteValidation, self).setUp()
        self.setup_context(tenant_id='tenant1')

    @mock.patch('sahara.service.api.get_cluster')
    def test_cluster_delete_when_protected(self, get_cluster_p):
        cluster = tu.create_cluster("cluster1", "tenant1", "vanilla",
                                    "1.2.1", ['ng1'], is_protected=True)
        get_cluster_p.return_value = cluster

        with testtools.ExpectedException(ex.DeletionFailed):
            try:
                c_val.check_cluster_delete(cluster.id)
            except ex.DeletionFailed as e:
                self.assert_protected_resource_exception(e)
                raise e

    @mock.patch('sahara.service.api.get_cluster')
    def test_public_cluster_delete_from_another_tenant(self, get_cluster_p):
        cluster = tu.create_cluster("cluster1", "tenant2", "vanilla",
                                    "1.2.1", ['ng1'], is_public=True)
        get_cluster_p.return_value = cluster

        with testtools.ExpectedException(ex.DeletionFailed):
            try:
                c_val.check_cluster_delete(cluster.id)
            except ex.DeletionFailed as e:
                self.assert_created_in_another_tenant_exception(e)
                raise e

@@ -40,6 +40,7 @@ class TestScalingValidation(u.ValidationTestCase):
         self._create_object_fun = mock.Mock()
         self.duplicates_detected = ("Duplicates in node group names are"
                                     " detected: ['a']")
+        self.setup_context(tenant_id='tenant1')

     @mock.patch('sahara.service.api.get_cluster')
     @mock.patch('sahara.plugins.base.PluginManager.get_plugin')

@@ -111,7 +112,7 @@ class TestScalingValidation(u.ValidationTestCase):
     def test_check_cluster_scaling_add_ng(self, ops):
         ops.get_engine_type_and_version.return_value = "direct.1.1"
         ng1 = tu.make_ng_dict('ng', '42', ['namenode'], 1)
-        cluster = tu.create_cluster("test-cluster", "tenant", "vanilla",
+        cluster = tu.create_cluster("test-cluster", "tenant1", "vanilla",
                                     "1.2.1", [ng1], status='Active',
                                     id='12321')
         data = {

@@ -329,6 +330,7 @@ class TestScalingValidation(u.ValidationTestCase):

     @mock.patch("sahara.service.api.OPS")
     def test_cluster_scaling_v_right_data(self, ops):
+        self.setup_context(tenant_id='t')
         ops.get_engine_type_and_version.return_value = "direct.1.1"
         self._create_object_fun = c_s.check_cluster_scaling

@@ -24,7 +24,9 @@ from sahara.tests.unit.service.validation import utils as u
 SAMPLE_DATA = {
     'name': 'testname',
     'plugin_name': 'vanilla',
-    'hadoop_version': '1.2.1'
+    'hadoop_version': '1.2.1',
+    'is_public': False,
+    'is_protected': False
 }

@@ -31,7 +31,9 @@ class TestClusterUpdateValidation(u.ValidationTestCase):
     def test_cluster_update_types(self):
         self._assert_types({
             'name': 'cluster',
-            'description': 'very big cluster'
+            'description': 'very big cluster',
+            'is_public': False,
+            'is_protected': False
         })

     def test_cluster_update_nothing_required(self):

@@ -43,7 +45,9 @@ class TestClusterUpdateValidation(u.ValidationTestCase):
         self._assert_create_object_validation(
             data={
                 'name': 'cluster',
-                'description': 'very big cluster'
+                'description': 'very big cluster',
+                'is_public': False,
+                'is_protected': False
             }
         )

@@ -144,7 +144,9 @@ class TestNGTemplateCreateValidation(u.ValidationTestCase):
                 'auto_security_group': False,
                 'availability_zone': 'here',
                 'is_proxy_gateway': False,
-                'volume_local_to_instance': False
+                'volume_local_to_instance': False,
+                'is_public': False,
+                'is_protected': False
             }
         )

@@ -173,7 +175,9 @@ class TestNGTemplateCreateValidation(u.ValidationTestCase):
                 'auto_security_group': None,
                 'availability_zone': None,
                 'is_proxy_gateway': None,
-                'volume_local_to_instance': None
+                'volume_local_to_instance': None,
+                'is_public': None,
+                'is_protected': None
             }
         )

@@ -43,7 +43,9 @@ SAMPLE_DATA = {
     'volumes_per_node': 2,
     'volumes_size': 10,
     'description': 'test node template',
-    'floating_ip_pool': 'd9a3bebc-f788-4b81-9a93-aa048022c1ca'
+    'floating_ip_pool': 'd9a3bebc-f788-4b81-9a93-aa048022c1ca',
+    'is_public': False,
+    'is_protected': False
 }

|
|||||||
"'813fe450-40d2-4acc-ade5-ea753a1bd5bc' "
|
"'813fe450-40d2-4acc-ade5-ea753a1bd5bc' "
|
||||||
"doesn't contain required tags: "
|
"doesn't contain required tags: "
|
||||||
"['1.2.1']"))
|
"['1.2.1']"))
|
||||||
|
|
||||||
|
def assert_protected_resource_exception(self, ex):
|
||||||
|
self.assertIn("marked as protected", six.text_type(ex))
|
||||||
|
|
||||||
|
def assert_created_in_another_tenant_exception(self, ex):
|
||||||
|
self.assertIn("wasn't created in this tenant", six.text_type(ex))
|
||||||
|
@ -25,7 +25,7 @@ def create_cluster(name, tenant, plugin, version, node_groups, **kwargs):
|
|||||||
dct = {'id': six.text_type(uuid.uuid4()), 'name': name,
|
dct = {'id': six.text_type(uuid.uuid4()), 'name': name,
|
||||||
'tenant_id': tenant, 'plugin_name': plugin,
|
'tenant_id': tenant, 'plugin_name': plugin,
|
||||||
'hadoop_version': version, 'node_groups': node_groups,
|
'hadoop_version': version, 'node_groups': node_groups,
|
||||||
'cluster_configs': {}, "sahara_info": {}}
|
'cluster_configs': {}, "sahara_info": {}, 'is_protected': False}
|
||||||
dct.update(kwargs)
|
dct.update(kwargs)
|
||||||
return r.ClusterResource(dct)
|
return r.ClusterResource(dct)
|
||||||
|
|
||||||