Merge "Replace DeploymentEngine with EnvManager"
commit 2baa4040a3

rally/api.py (141 lines changed)
@@ -23,7 +23,6 @@ import traceback
import jinja2
import jinja2.meta
import jsonschema
from oslo_config import cfg
import requests
from requests.packages import urllib3
@@ -35,7 +34,6 @@ from rally.common.plugin import discover
from rally.common import utils
from rally.common import version as rally_version
from rally import consts
from rally.deployment import engine as deploy_engine
from rally import exceptions
from rally.task import engine
from rally.task import exporter as texporter
@@ -73,45 +71,38 @@ class _Deployment(APIGroup):

# NOTE(andreykurilin): the following transformation is a preparatory
# step for further refactoring (it will be done soon).
print_warning = True
if "type" not in config:
# it looks like a new format! wow!
config = {"type": "ExistingCloud",
"creds": config}
print_warning = False
print_warning = False

extras = {}
if "type" in config:
if config["type"] != "ExistingCloud":
raise exceptions.RallyException(
"You are using deployment type which doesn't exist. Please"
" check the latest documentation and fix deployment "
"config.")

config = config["creds"]
extras = config.get("extra", {})
print_warning = True

try:
deployment = objects.Deployment(name=name, config=config)
deployment = objects.Deployment(name=name, config=config,
extras=extras)
except exceptions.DBRecordExists:
if logging.is_debug():
LOG.exception("Deployment with such name exists")
raise

deployer = deploy_engine.Engine.get_engine(
deployment["config"]["type"], deployment)
try:
deployer.validate()
except jsonschema.ValidationError:
LOG.error("Deployment %s: Schema validation error." %
deployment["uuid"])
deployment.update_status(consts.DeployStatus.DEPLOY_FAILED)
raise

if print_warning and config.get("type", "") == "ExistingCloud":
if print_warning:
# credentials are stored in the list, but it contains one item.
new_conf = dict(
(name, cred[0])
for name, cred in deployer._get_creds(config).items())
new_conf = deployment.spec
LOG.warning(
"The used config schema is deprecated since Rally 0.10.0. "
"The new one is much simpler, try it now:\n%s"
% json.dumps(new_conf, indent=4)
)

with deployer:
credentials = deployer.make_deploy()
deployment.update_credentials(credentials)
return deployment
return deployment

def create(self, config, name):
return self._create(config, name).to_dict()
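Note on the change above: _create() still accepts the deprecated "ExistingCloud" schema but now also takes the simplified, creds-only format. A minimal sketch of both shapes (all values below are hypothetical, not taken from this commit):

    # Deprecated pre-0.10.0 style; still accepted, but triggers the warning logged above.
    old_config = {
        "type": "ExistingCloud",
        "creds": {"openstack": {"auth_url": "http://example.com:5000/v3/",
                                "admin": {"username": "admin",
                                          "password": "secret",
                                          "project_name": "admin"}}}
    }
    # New style: just the per-platform credentials; _create() wraps it into the
    # old envelope before handing it to objects.Deployment / EnvManager.
    new_config = {"openstack": {"auth_url": "http://example.com:5000/v3/",
                                "admin": {"username": "admin",
                                          "password": "secret",
                                          "project_name": "admin"}}}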
@@ -121,26 +112,11 @@ class _Deployment(APIGroup):

:param deployment: UUID or name of the deployment
"""
# TODO(akscram): We have to be sure that there are no running
# tasks for this deployment.
# TODO(akscram): Check that the deployment have got a status that
# is equal to "*->finished" or "deploy->inconsistent".
deployment = objects.Deployment.get(deployment)
try:
deployer = deploy_engine.Engine.get_engine(
deployment["config"]["type"], deployment)
with deployer:
deployer.make_cleanup()
except exceptions.PluginNotFound:
LOG.info("Deployment %s will be deleted despite exception"
% deployment["uuid"])

for verifier in self.api.verifier.list():
self.api.verifier.delete(verifier_id=verifier["name"],
deployment_id=deployment["name"],
force=True)
deploy = objects.Deployment.get(deployment)

deployment.delete()
deploy.env_obj.destroy(skip_cleanup=True)
deploy.env_obj.delete()
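The delete path above no longer drives a deployment engine; it tears the backing environment down through EnvManager. A hedged sketch of the new flow (the deployment name is made up, error handling omitted):

    from rally.common import objects

    deploy = objects.Deployment.get("my-deployment")   # hypothetical name
    deploy.env_obj.destroy(skip_cleanup=True)          # destroy platforms, skip resource cleanup
    deploy.env_obj.delete()                            # drop the env and its related records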
def recreate(self, deployment, config=None):
"""Performs a cleanup and then makes a deployment again.
@@ -149,47 +125,8 @@ class _Deployment(APIGroup):
:param config: an optional dict with deployment config to update before
redeploy
"""
deployment = objects.Deployment.get(deployment)
deployer = deploy_engine.Engine.get_engine(
deployment["config"]["type"], deployment)

print_warning = True
if config and "type" not in config:
# it looks like a new format! wow!
config = {"type": "ExistingCloud",
"creds": config}
print_warning = False

if config:
if deployment["config"]["type"] != config["type"]:
raise exceptions.RallyException(
"Can't change deployment type.")
try:
deployer.validate(config)
except jsonschema.ValidationError:
LOG.error("Config schema validation error.")
raise

if (config and print_warning and
config.get("type", "") == "ExistingCloud"):
# credentials are stored in the list, but it contains one item.
new_conf = dict(
(name, cred[0])
for name, cred in deployer._get_creds(config).items())
LOG.warning(
"The used config schema is deprecated since Rally 0.10.0. "
"The new one is much simpler, try it now:\n%s"
% json.dumps(new_conf, indent=4)
)

with deployer:
deployer.make_cleanup()

if config:
deployment.update_config(config)

credentials = deployer.make_deploy()
deployment.update_credentials(credentials)
raise exceptions.RallyException("Sorry, but recreate method of "
"deployments is temporary disabled.")

def _get(self, deployment):
"""Get the deployment.
@@ -200,24 +137,7 @@ class _Deployment(APIGroup):
return objects.Deployment.get(deployment)

def get(self, deployment):
deployment = self._get(deployment).to_dict()
if deployment["config"].get("type", "") == "ExistingCloud":
deployment_creds = {}
if "creds" not in deployment["config"]:
extra = deployment["config"].pop("extra", None)
deployment["config"] = {
"type": deployment["config"].pop("type"),
"creds": {"openstack": deployment["config"]}
}
if extra is not None:
deployment["config"]["extra"] = extra
for platform, creds in deployment["config"]["creds"].items():
if isinstance(creds, dict):
deployment_creds[platform] = creds
else:
deployment_creds[platform] = creds[0]
deployment["config"] = deployment_creds
return deployment
return self._get(deployment).to_dict()

def service_list(self, deployment):
"""Get the services list.
@@ -244,6 +164,7 @@ class _Deployment(APIGroup):
:returns: Service list
"""
result = {}

all_credentials = self._get(deployment).get_all_credentials()
for platform in all_credentials:
result[platform] = []
@@ -398,7 +319,7 @@ class _Task(APIGroup):
uuid=deployment["uuid"],
status=deployment["status"])

return objects.Task(deployment_uuid=deployment["uuid"],
return objects.Task(env_uuid=deployment["uuid"],
tags=tags).to_dict()

def validate(self, deployment, config, task_instance=None, task=None):
@@ -421,7 +342,7 @@ class _Task(APIGroup):
task = objects.Task.get(task)
deployment = task["deployment_uuid"]
else:
task = objects.Task(deployment_uuid=deployment, temporary=True)
task = objects.Task(env_uuid=deployment, temporary=True)
deployment = objects.Deployment.get(deployment)

try:
@@ -490,11 +411,7 @@ class _Task(APIGroup):
LOG.info("Run Task %s against Deployment %s"
% (task["uuid"], deployment["uuid"]))

try:
task_engine.run()
except Exception:
deployment.update_status(consts.DeployStatus.DEPLOY_INCONSISTENT)
raise
task_engine.run()

return task["uuid"], task.get_status(task["uuid"])

@@ -563,7 +480,7 @@ class _Task(APIGroup):
uuid=deployment["uuid"],
status=deployment["status"])

task_inst = objects.Task(deployment_uuid=deployment["uuid"],
task_inst = objects.Task(env_uuid=deployment["uuid"],
tags=tags)
task_inst.update_status(consts.TaskStatus.RUNNING)
for subtask in task_results["subtasks"]:
@@ -150,19 +150,18 @@ def task_update_status(task_uuid, status, allowed_statuses):
status)

def task_list(status=None, deployment=None, tags=None):
def task_list(status=None, env=None, tags=None):
"""Get a list of tasks.

:param status: Task status to filter the returned list on. If set to
None, all the tasks will be returned.
:param deployment: Deployment UUID to filter the returned list on.
If set to None, tasks from all deployments will be
returned.
None, all the tasks will be returned.
:param env: Environment UUID to filter the returned list on.
If set to None, tasks from all environments will be returned.
:param tags: A list of tags to filter tasks by.
:returns: A list of dicts with data on the tasks.
"""
return get_impl().task_list(status=status,
deployment=deployment,
env=env,
tags=tags)
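Callers of the DB layer now filter tasks by environment rather than deployment. A minimal usage sketch (the UUID is a placeholder, not a real value):

    from rally.common import db

    running = db.task_list(status="running",
                           env="<env-uuid>",      # hypothetical environment UUID
                           tags=["smoke"])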
@@ -277,91 +276,6 @@ def workload_set_results(workload_uuid, subtask_uuid, task_uuid, load_duration,
contexts_results=contexts_results)

def deployment_create(values):
"""Create a deployment from the values dictionary.

:param values: dict with record values on the deployment.
:returns: a dict with data on the deployment.
"""
return get_impl().deployment_create(values)

def deployment_delete(uuid):
"""Delete a deployment by UUID.

:param uuid: UUID of the deployment.
:raises DBRecordNotFound: if the deployment does not exist.
:raises DBConflict: if deployment cant be deleted
"""
return get_impl().deployment_delete(uuid)

def deployment_get(deployment):
"""Get a deployment by UUID.

:param deployment: UUID or name of the deployment.
:raises DBRecordNotFound: if the deployment does not exist.
:returns: a dict with data on the deployment.
"""
return get_impl().deployment_get(deployment)

def deployment_update(uuid, values):
"""Update a deployment by values.

:param uuid: UUID of the deployment.
:param values: dict with items to update.
:raises DBRecordNotFound: if the deployment does not exist.
:returns: a dict with data on the deployment.
"""
return get_impl().deployment_update(uuid, values)

def deployment_list(status=None, parent_uuid=None, name=None):
"""Get list of deployments.

:param status: if None returns any deployments with any status.
:param parent_uuid: filter by parent. If None, return only "root"
deployments.
:param name: name of deployment.
:returns: a list of dicts with data on the deployments.
"""
return get_impl().deployment_list(status=status, parent_uuid=parent_uuid,
name=name)

def resource_create(values):
"""Create a resource from the values dictionary.

:param values: a dict with data on the resource.
:returns: a dict with updated data on the resource.
"""
return get_impl().resource_create(values)

def resource_get_all(deployment_uuid, provider_name=None, type=None):
"""Return resources of a deployment.

:param deployment_uuid: filter by uuid of a deployment
:param provider_name: filter by provider_name, if is None, then
return all providers
:param type: filter by type, if is None, then return all types
:returns: a list of dicts with data on a resource
"""
return get_impl().resource_get_all(deployment_uuid,
provider_name=provider_name,
type=type)

def resource_delete(id):
"""Delete a resource.

:param id: ID of a resource.
:raises DBRecordNotFound: if the resource does not exist.
"""
return get_impl().resource_delete(id)

def env_get(uuid_or_name):
"""Returns envs with corresponding uuid or name."""
return get_impl().env_get(uuid_or_name)
@@ -481,18 +395,19 @@ def verifier_update(verifier_id, **properties):
return get_impl().verifier_update(verifier_id, properties)

def verification_create(verifier_uuid, deployment_uuid, tags=None,
run_args=None):
def verification_create(verifier_uuid, env, tags=None, run_args=None):
"""Create a verification record.

:param verifier_uuid: verification UUID
:param deployment_uuid: deployment UUID
:param env: Environment UUID
:param tags: a list of tags to assign them to verification
:param run_args: a dict with run arguments for verification
:returns: a dict with verification data
"""
return get_impl().verification_create(verifier_uuid, deployment_uuid,
tags, run_args)
return get_impl().verification_create(verifier_uuid,
env=env,
tags=tags,
run_args=run_args)

def verification_get(verification_uuid):
@@ -505,18 +420,20 @@ def verification_get(verification_uuid):
return get_impl().verification_get(verification_uuid)

def verification_list(verifier_id=None, deployment_id=None, tags=None,
def verification_list(verifier_id=None, env=None, tags=None,
status=None):
"""List all verification records.

:param verifier_id: verifier name or UUID to filter verifications by
:param deployment_id: deployment name or UUID to filter verifications by
:param env: Environment name or UUID to filter verifications by
:param tags: tags to filter verifications by
:param status: status to filter verifications by
:returns: a list of dicts with verifications data
"""
return get_impl().verification_list(verifier_id, deployment_id, tags,
status)
return get_impl().verification_list(verifier_id,
env=env,
tags=tags,
status=status)

def verification_delete(verification_uuid):
@@ -328,7 +328,7 @@ class Connection(object):
return result

@serialize
def task_list(self, status=None, deployment=None, tags=None):
def task_list(self, status=None, env=None, tags=None):
session = get_session()
tasks = []
with session.begin():
@@ -337,9 +337,8 @@ class Connection(object):
filters = {}
if status is not None:
filters["status"] = status
if deployment is not None:
filters["deployment_uuid"] = self.deployment_get(
deployment)["uuid"]
if env is not None:
filters["env_uuid"] = self.env_get(env)["uuid"]
if filters:
query = query.filter_by(**filters)

@@ -657,14 +656,18 @@ class Connection(object):
def env_delete_cascade(self, uuid):
session = get_session()
with session.begin():
(self.model_query(models.Task, session=session)
.filter_by(env_uuid=uuid)
.delete())
(self.model_query(models.Verification, session=session)
.filter_by(env_uuid=uuid)
.delete())
(self.model_query(models.Platform, session=session)
.filter_by(env_uuid=uuid)
.delete())
(self.model_query(models.Env, session=session)
.filter_by(uuid=uuid)
.delete())
# NOTE(boris-42): Add queries to delete corresponding
# task and verify results, when they switch to Env
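For context, a sketch of how the cascade delete added above would be invoked (the UUID is a placeholder; in practice the Connection instance comes from the DB API layer rather than being constructed directly):

    conn.env_delete_cascade("<env-uuid>")   # removes tasks, verifications and platforms, then the env row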
@serialize
def platforms_list(self, env_uuid):
@@ -706,96 +709,6 @@ class Connection(object):
.filter_by(uuid=uuid)
.update(values))

def _deployment_get(self, deployment, session=None):
stored_deployment = self.model_query(
models.Deployment,
session=session).filter_by(name=deployment).first()
if not stored_deployment:
stored_deployment = self.model_query(
models.Deployment,
session=session).filter_by(uuid=deployment).first()

if not stored_deployment:
raise exceptions.DBRecordNotFound(
criteria="name or uuid is %s" % deployment,
table="deployments")
return stored_deployment

@serialize
def deployment_create(self, values):
deployment = models.Deployment()
try:
deployment.update(values)
deployment.save()
except db_exc.DBDuplicateEntry:
raise exceptions.DBRecordExists(
field="name", value=values["name"], table="deployments")
return deployment

def deployment_delete(self, uuid):
session = get_session()
with session.begin():
count = (self.model_query(models.Resource, session=session).
filter_by(deployment_uuid=uuid).count())
if count:
raise exceptions.DBConflict(
"There are allocated resources for the deployment %s"
% uuid)

count = (self.model_query(models.Deployment, session=session).
filter_by(uuid=uuid).delete(synchronize_session=False))
if not count:
raise exceptions.DBRecordNotFound(
criteria="uuid: %s" % uuid, table="deployments")

@serialize
def deployment_get(self, deployment):
return self._deployment_get(deployment)

@serialize
def deployment_update(self, deployment, values):
session = get_session()
values.pop("uuid", None)
with session.begin():
dpl = self._deployment_get(deployment, session=session)
dpl.update(values)
return dpl

@serialize
def deployment_list(self, status=None, parent_uuid=None, name=None):
query = (self.model_query(models.Deployment).
filter_by(parent_uuid=parent_uuid))

if name:
query = query.filter_by(name=name)
if status:
query = query.filter_by(status=status)
return query.all()

@serialize
def resource_create(self, values):
resource = models.Resource()
resource.update(values)
resource.save()
return resource

@serialize
def resource_get_all(self, deployment_uuid, provider_name=None, type=None):
query = (self.model_query(models.Resource).
filter_by(deployment_uuid=deployment_uuid))
if provider_name is not None:
query = query.filter_by(provider_name=provider_name)
if type is not None:
query = query.filter_by(type=type)
return query.all()

def resource_delete(self, id):
count = (self.model_query(models.Resource).
filter_by(id=id).delete(synchronize_session=False))
if not count:
raise exceptions.DBRecordNotFound(
criteria="id: %s" % id, table="resources")

@serialize
def verifier_create(self, name, vtype, platform, source, version,
system_wide, extra_settings=None):
@@ -851,13 +764,13 @@ class Connection(object):
return verifier

@serialize
def verification_create(self, verifier_id, deployment_id, tags=None,
def verification_create(self, verifier_id, env, tags=None,
run_args=None):
verifier = self._verifier_get(verifier_id)
deployment = self._deployment_get(deployment_id)
env = self.env_get(env)
verification = models.Verification()
verification.update({"verifier_uuid": verifier.uuid,
"deployment_uuid": deployment["uuid"],
"env_uuid": env["uuid"],
"run_args": run_args})
verification.save()

@@ -888,7 +801,7 @@ class Connection(object):
return verification

@serialize
def verification_list(self, verifier_id=None, deployment_id=None,
def verification_list(self, verifier_id=None, env=None,
tags=None, status=None):
session = get_session()
with session.begin():
@@ -896,10 +809,9 @@ class Connection(object):
if verifier_id:
verifier = self._verifier_get(verifier_id, session=session)
filter_by["verifier_uuid"] = verifier.uuid
if deployment_id:
deployment = self._deployment_get(deployment_id,
session=session)
filter_by["deployment_uuid"] = deployment.uuid
if env:
env = self.env_get(env)
filter_by["env_uuid"] = env["uuid"]
if status:
filter_by["status"] = status
@@ -0,0 +1,227 @@
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""move_deployment_to_env

Revision ID: 7287df262dbc
Revises: a43700a813a5
Create Date: 2018-01-08 15:16:43.023067

"""

import copy
import uuid

from alembic import op
from oslo_utils import timeutils
import sqlalchemy as sa

from rally.common.db.sqlalchemy import types as sa_types
from rally import exceptions

# revision identifiers, used by Alembic.
revision = "7287df262dbc"
down_revision = "a43700a813a5"
branch_labels = None
depends_on = None

STATUS_MAP = {
"deploy->init": "INITIALIZING",
"deploy->started": "INITIALIZING",
"deploy->finished": "READY",
"deploy->failed": "FAILED TO CREATE",
"deploy->inconsistent": "FAILED TO CREATE",
"deploy->subdeploy": "INITIALIZING",
"cleanup->started": "CLEANING",
"cleanup->failed": "READY",
"cleanup->finished": "READY"
}
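To make the table above concrete, this is how the migration applies it while copying rows (illustrative only):

    old_status = "deploy->finished"        # value read from deployments.enum_deployments_status
    new_status = STATUS_MAP[old_status]    # -> "READY", written to envs.status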
deployments_helper = sa.Table(
"deployments",
sa.MetaData(),
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
sa.Column("uuid", sa.String(36), nullable=False),
sa.Column("name", sa.String(255)),
sa.Column("config", sa_types.MutableJSONEncodedDict()),
sa.Column("credentials", sa_types.MutableJSONEncodedDict()),
sa.Column("enum_deployments_status", sa.Enum(*STATUS_MAP.keys())),
sa.Column("created_at", sa.DateTime),
)

envs_helper = sa.Table(
"envs",
sa.MetaData(),
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
sa.Column("uuid", sa.String(36), nullable=False),

sa.Column("name", sa.String(255)),
sa.Column("description", sa.Text),
sa.Column("status", sa.String(36)),

sa.Column("extras", sa_types.MutableJSONEncodedDict),
sa.Column("spec", sa_types.MutableJSONEncodedDict),

sa.Column("created_at", sa.DateTime),
sa.Column("updated_at", sa.DateTime)
)

platforms_helper = sa.Table(
"platforms",
sa.MetaData(),
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
sa.Column("uuid", sa.String(36), nullable=False),
sa.Column("env_uuid", sa.String(36)),

sa.Column("status", sa.String(36)),

sa.Column("plugin_name", sa.String(36)),
sa.Column("plugin_spec", sa_types.MutableJSONEncodedDict),
sa.Column("plugin_data", sa_types.MutableJSONEncodedDict),

sa.Column("platform_name", sa.String(36)),
sa.Column("platform_data", sa_types.MutableJSONEncodedDict),

sa.Column("created_at", sa.DateTime),
sa.Column("updated_at", sa.DateTime)
)

tasks_helper = sa.Table(
"tasks",
sa.MetaData(),
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
sa.Column("uuid", sa.String(36), nullable=False),

sa.Column("env_uuid", sa.String(36)),
sa.Column("deployment_uuid", sa.String(36))
)

verifications_helper = sa.Table(
"verifications",
sa.MetaData(),
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
sa.Column("uuid", sa.String(36), nullable=False),

sa.Column("env_uuid", sa.String(36)),
sa.Column("deployment_uuid", sa.String(36))
)

def upgrade():

connection = op.get_bind()
for deployment in connection.execute(deployments_helper.select()):
status = "FAILED TO CREATE"
spec = deployment.config
extras = {}
platform_data = None
if isinstance(spec, dict) and (
# existing cloud is only one deployment engine which we
# continue supporting
spec.get("type", "") == "ExistingCloud"
# We know only about one credential type and it doesn't require
# writing additional plugins at the moment.
and (set(spec["creds"]) == {"openstack"}
or not spec["creds"])):

status = STATUS_MAP[deployment.enum_deployments_status]
extras = deployment.config.get("extra", {})
if "openstack" in spec["creds"]:
spec = {"existing@openstack": spec["creds"]["openstack"]}
creds = copy.deepcopy(spec["existing@openstack"])

platform_data = {
"admin": creds.pop("admin", {}),
"users": creds.pop("users", [])
}
platform_data["admin"].update(creds)
for user in platform_data["users"]:
user.update(creds)
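A worked example of the credential reshaping done in the loop above, with hypothetical values; the shared options (auth_url here) are folded into every admin and user entry:

    spec = {"existing@openstack": {"auth_url": "http://example.com:5000/v3/",
                                   "admin": {"username": "admin", "password": "s3cr3t"},
                                   "users": [{"username": "demo", "password": "demo"}]}}
    # After the pops and update() calls, platform_data looks like:
    platform_data = {
        "admin": {"username": "admin", "password": "s3cr3t",
                  "auth_url": "http://example.com:5000/v3/"},
        "users": [{"username": "demo", "password": "demo",
                   "auth_url": "http://example.com:5000/v3/"}],
    }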
else:
# empty deployment
spec = {}

connection.execute(
envs_helper.insert(),
[{
"uuid": deployment.uuid,
"name": deployment.name,
"description": "",
"status": status,
"spec": spec,
"extras": extras,
"created_at": deployment.created_at,
"updated_at": timeutils.utcnow()
}]
)
if platform_data:
connection.execute(
platforms_helper.insert(),
[{
"uuid": str(uuid.uuid4()),
"env_uuid": deployment.uuid,
"status": "READY",
"plugin_name": "existing@openstack",
"plugin_spec": spec["existing@openstack"],
"plugin_data": {},
"platform_name": "openstack",
"platform_data": platform_data,
"created_at": timeutils.utcnow(),
"updated_at": timeutils.utcnow()
}]
)

op.add_column(
"verifications",
sa.Column("env_uuid", sa.String(36))
)
op.add_column(
"tasks",
sa.Column("env_uuid", sa.String(36))
)

conn = op.get_bind()

conn.execute(
tasks_helper.update().values(
env_uuid=tasks_helper.c.deployment_uuid)
)
conn.execute(
verifications_helper.update().values(
env_uuid=verifications_helper.c.deployment_uuid)
)

with op.batch_alter_table("tasks") as batch_op:
batch_op.alter_column("env_uuid", nullable=False)
batch_op.drop_index("task_deployment")
batch_op.drop_column("deployment_uuid")

with op.batch_alter_table("verifications") as batch_op:
batch_op.alter_column("env_uuid", nullable=False)
batch_op.drop_column("deployment_uuid")

op.drop_index("resource_deployment_uuid", "resources")
op.drop_index("resource_provider_name", "resources")
op.drop_index("resource_type", "resources")
op.drop_index("resource_provider_name_and_type", "resources")
op.drop_table("resources")

op.drop_index("deployment_uuid", "deployments")
op.drop_index("deployment_parent_uuid", "deployments")
op.drop_table("deployments")

def downgrade():
raise exceptions.DowngradeNotSupported()
@@ -56,86 +56,6 @@ class RallyBase(models.ModelBase):
super(RallyBase, self).save(session=session)

class Deployment(BASE, RallyBase):
"""Represent a deployment of OpenStack."""
__tablename__ = "deployments"
__table_args__ = (
sa.Index("deployment_uuid", "uuid", unique=True),
sa.Index("deployment_parent_uuid", "parent_uuid"),
)

id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
uuid = sa.Column(sa.String(36), default=UUID, nullable=False)
parent_uuid = sa.Column(
sa.String(36),
sa.ForeignKey(uuid, use_alter=True, name="fk_parent_uuid"),
default=None,
)
name = sa.Column(sa.String(255), unique=True)
started_at = sa.Column(sa.DateTime)
completed_at = sa.Column(sa.DateTime)
# XXX(akscram): Do we need to explicitly store a name of the
# deployment engine?
# engine_name = sa.Column(sa.String(36))

config = sa.Column(
sa_types.MutableJSONEncodedDict,
default={},
nullable=False,
)

credentials = sa.Column(
sa_types.MutableJSONEncodedDict, default={}, nullable=False)

status = sa.Column(
sa.Enum(*consts.DeployStatus, name="enum_deploy_status"),
name="enum_deployments_status",
default=consts.DeployStatus.DEPLOY_INIT,
nullable=False,
)

parent = sa.orm.relationship(
"Deployment",
backref=sa.orm.backref("subdeploys"),
remote_side=[uuid],
foreign_keys=parent_uuid,
)

class Resource(BASE, RallyBase):
"""Represent a resource of a deployment."""
__tablename__ = "resources"
__table_args__ = (
sa.Index("resource_deployment_uuid", "deployment_uuid"),
sa.Index("resource_provider_name", "deployment_uuid", "provider_name"),
sa.Index("resource_type", "deployment_uuid", "type"),
sa.Index("resource_provider_name_and_type", "deployment_uuid",
"provider_name", "type"),
)

id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
provider_name = sa.Column(sa.String(255))
type = sa.Column(sa.String(255))

info = sa.Column(
sa_types.MutableJSONEncodedDict,
default={},
nullable=False,
)

deployment_uuid = sa.Column(
sa.String(36),
sa.ForeignKey(Deployment.uuid),
nullable=False,
)
deployment = sa.orm.relationship(
Deployment,
backref=sa.orm.backref("resources"),
foreign_keys=deployment_uuid,
primaryjoin=(deployment_uuid == Deployment.uuid),
)

class Env(BASE, RallyBase):
"""Represent a environment."""
__tablename__ = "envs"
@@ -183,24 +103,12 @@ class Task(BASE, RallyBase):
__table_args__ = (
sa.Index("task_uuid", "uuid", unique=True),
sa.Index("task_status", "status"),
sa.Index("task_deployment", "deployment_uuid"),
)

id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
uuid = sa.Column(sa.String(36), default=UUID, nullable=False)

deployment_uuid = sa.Column(
sa.String(36),
sa.ForeignKey(Deployment.uuid),
nullable=False,
)

deployment = sa.orm.relationship(
Deployment,
backref=sa.orm.backref("tasks"),
foreign_keys=deployment_uuid,
primaryjoin=(deployment_uuid == Deployment.uuid),
)
env_uuid = sa.Column(sa.String(36), nullable=False)

# we do not save the whole input task
input_task = deferred(sa.Column(sa.Text, default=""))
@@ -433,9 +341,7 @@ class Verification(BASE, RallyBase):
verifier_uuid = sa.Column(sa.String(36),
sa.ForeignKey(Verifier.uuid),
nullable=False)
deployment_uuid = sa.Column(sa.String(36),
sa.ForeignKey(Deployment.uuid),
nullable=False)
env_uuid = sa.Column(sa.String(36), nullable=False)

run_args = sa.Column(sa_types.MutableJSONEncodedDict)
@@ -13,14 +13,11 @@
# License for the specific language governing permissions and limitations
# under the License.

import datetime as dt

import jsonschema

from rally.common import db
from rally.common import logging
from rally import consts
from rally.deployment import credential
from rally.env import env_mgr
from rally import exceptions

@@ -48,15 +45,57 @@ CREDENTIALS_SCHEMA = {
}

_STATUS_OLD_TO_NEW = {
consts.DeployStatus.DEPLOY_INIT: env_mgr.STATUS.INIT,
consts.DeployStatus.DEPLOY_STARTED: env_mgr.STATUS.INIT,
consts.DeployStatus.DEPLOY_FINISHED: env_mgr.STATUS.READY,
consts.DeployStatus.DEPLOY_FAILED: env_mgr.STATUS.FAILED_TO_CREATE,
consts.DeployStatus.DEPLOY_INCONSISTENT: env_mgr.STATUS.FAILED_TO_CREATE,
consts.DeployStatus.DEPLOY_SUBDEPLOY: env_mgr.STATUS.INIT,
consts.DeployStatus.CLEANUP_STARTED: env_mgr.STATUS.CLEANING,
consts.DeployStatus.CLEANUP_FAILED: env_mgr.STATUS.READY,
consts.DeployStatus.CLEANUP_FINISHED: env_mgr.STATUS.READY
}
_STATUS_NEW_TO_OLD = {
env_mgr.STATUS.INIT: consts.DeployStatus.DEPLOY_INIT,
env_mgr.STATUS.READY: consts.DeployStatus.DEPLOY_FINISHED,
env_mgr.STATUS.FAILED_TO_CREATE: consts.DeployStatus.DEPLOY_FAILED,
env_mgr.STATUS.CLEANING: consts.DeployStatus.CLEANUP_STARTED,
env_mgr.STATUS.DESTROYING: consts.DeployStatus.DEPLOY_INIT,
env_mgr.STATUS.FAILED_TO_DESTROY: consts.DeployStatus.DEPLOY_INCONSISTENT,
env_mgr.STATUS.DESTROYED: consts.DeployStatus.DEPLOY_INIT
}

class Deployment(object):
"""Represents a deployment object."""
TIME_FORMAT = consts.TimeFormat.ISO8601

def __init__(self, deployment=None, **attributes):
def __init__(self, deployment=None, name=None, config=None, extras=None):
if deployment:
self.deployment = deployment
self._env = deployment
else:
self.deployment = db.deployment_create(attributes)
self._env = env_mgr.EnvManager.create(
name=name,
spec=config or {},
description="",
extras=extras or {})
self._env_data = self._env.data
self._all_credentials = {}
for p in self._env_data["platforms"]:
if p["plugin_name"].startswith("existing@"):
p["plugin_name"] = p["plugin_name"][9:]
self._all_credentials[p["plugin_name"]] = [p["platform_data"]]

self.config = {}
for p_name, p_cfg in self._env_data["spec"].items():
if p_name.startswith("existing@"):
p_name = p_name[9:]
self.config[p_name] = p_cfg
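In short, the constructor republishes the env spec under the old config keys by stripping the "existing@" prefix. A tiny sketch with a made-up spec:

    spec = {"existing@openstack": {"auth_url": "http://example.com:5000/v3/"}}   # hypothetical
    config = {name[9:] if name.startswith("existing@") else name: cfg
              for name, cfg in spec.items()}
    # config == {"openstack": {"auth_url": "http://example.com:5000/v3/"}}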
@property
def env_obj(self):
return self._env

def __getitem__(self, key):
# TODO(astudenov): remove this in future releases
@@ -65,48 +104,55 @@ class Deployment(object):
"Use deployment.get_credentials_for('openstack')"
"['%s'] to get credentials." % (key, key))
return self.get_credentials_for("openstack")[key]
return self.deployment[key]
if key == "status":
status = self._env.status
return _STATUS_NEW_TO_OLD.get(status, status)
elif key == "extra":
return self._env_data["extras"]
if hasattr(self._env, key):
return getattr(self._env, key)
elif hasattr(self, key):
return getattr(self, key)
return self._env_data[key]

def to_dict(self):
result = {}
formatters = ["created_at", "completed_at", "started_at", "updated_at"]
for field, value in self.deployment.items():
if field in formatters:
if value is None:
value = "n/a"
else:
value = value.strftime(self.TIME_FORMAT)
result[field] = value
return result
return {
"id": self._env_data["id"],
"uuid": self._env_data["uuid"],
"parent_uuid": None,
"name": self._env_data["name"],
"created_at": self._env_data["created_at"].strftime(
self.TIME_FORMAT),
"started_at": self._env_data["created_at"].strftime(
self.TIME_FORMAT),
"completed_at": "n/a",
"updated_at": self._env_data["updated_at"].strftime(
self.TIME_FORMAT),
"config": self.config,
"credentials": self._all_credentials,
"status": self["status"],
}

@staticmethod
def get(deploy):
return Deployment(db.deployment_get(deploy))
return Deployment(env_mgr.EnvManager.get(deploy))

@staticmethod
def list(status=None, parent_uuid=None, name=None):
return [Deployment(deployment) for deployment in
db.deployment_list(status, parent_uuid, name)]
# we do not use parent_uuid...
if name:
try:
env = env_mgr.EnvManager(name)
except exceptions.DBRecordNotFound:
return []
envs = [env]
else:
envs = env_mgr.EnvManager.list(status=status)

@staticmethod
def delete_by_uuid(uuid):
db.deployment_delete(uuid)
return [Deployment(e) for e in envs]

def _update(self, values):
self.deployment = db.deployment_update(self.deployment["uuid"], values)

def update_status(self, status):
self._update({"status": status})

def update_name(self, name):
self._update({"name": name})

def update_config(self, config):
self._update({"config": config})

def update_credentials(self, credentials):
jsonschema.validate(credentials, CREDENTIALS_SCHEMA)
self._update({"credentials": credentials})
self.deployment = db.deployment_update(self._env.uuid, values)

def get_validation_context(self):
ctx = {}
@@ -115,38 +161,34 @@ class Deployment(object):
return ctx

def verify_connections(self):
for platform_creds in self.get_all_credentials().values():
for creds in platform_creds:
if creds["admin"]:
creds["admin"].verify_connection()

for user in creds["users"]:
user.verify_connection()
for platform_name, result in self._env.check_health().items():
if not result["available"]:
raise exceptions.RallyException(
"Platform %s is not available: %s." % (platform_name,
result["message"]))
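The rewritten verify_connections() leans on EnvManager.check_health(), which, as used here, returns a per-platform dict. A hedged illustration of the shape it reacts to (values invented):

    # {"existing@openstack": {"available": False, "message": "Unable to authenticate"}}
    # Any entry with available == False is converted into a RallyException above.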
def get_platforms(self):
return self.deployment["credentials"].keys()
return self._all_credentials.keys()

def get_all_credentials(self):
all_credentials = {}
for platform in self.get_platforms():
all_credentials[platform] = []
for platform, credentials in self._all_credentials.items():
credential_cls = credential.get(platform)
for credentials in self.deployment["credentials"][platform]:
try:
admin = credentials["admin"]
except Exception:
raise KeyError(credentials)
all_credentials[platform].append({
"admin": credential_cls(**admin) if admin else None,
"users": [credential_cls(**user) for user in
credentials["users"]]})
admin = credentials[0]["admin"]
if admin:
admin = credential_cls(
permission=consts.EndpointPermission.ADMIN, **admin)
all_credentials[platform] = [{
"admin": admin,
"users": [credential_cls(**user) for user in
credentials[0]["users"]]}]
return all_credentials
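For reference, the rough shape of the value returned by the new get_all_credentials() (one single-item list per platform; the concrete credential class depends on the installed plugin):

    # {"openstack": [{"admin": <credential object or None>,
    #                 "users": [<credential object>, ...]}]}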
def get_credentials_for(self, platform):
if platform == "default":
return {"admin": None, "users": []}
try:
creds = self.deployment["credentials"][platform][0]
creds = self._all_credentials[platform][0]
except (KeyError, IndexError):
raise exceptions.RallyException(
"No credentials found for %s" % platform)
@@ -155,30 +197,3 @@ class Deployment(object):
credential_cls = credential.get(platform)
return {"admin": credential_cls(**admin) if admin else None,
"users": [credential_cls(**user) for user in creds["users"]]}

def set_started(self):
self._update({"started_at": dt.datetime.now(),
"status": consts.DeployStatus.DEPLOY_STARTED})

def set_completed(self):
self._update({"completed_at": dt.datetime.now(),
"status": consts.DeployStatus.DEPLOY_FINISHED})

def add_resource(self, provider_name, type=None, info=None):
return db.resource_create({
"deployment_uuid": self.deployment["uuid"],
"provider_name": provider_name,
"type": type,
"info": info,
})

def get_resources(self, provider_name=None, type=None):
return db.resource_get_all(self.deployment["uuid"],
provider_name=provider_name, type=type)

@staticmethod
def delete_resource(resource_id):
db.resource_delete(resource_id)

def delete(self):
db.deployment_delete(self.deployment["uuid"])
@@ -307,6 +307,8 @@ class Task(object):
self.task = task or db.task_create(attributes)

def __getitem__(self, key):
if key == "deployment_uuid":
key = "env_uuid"
return self.task[key]
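The __getitem__ alias above keeps old callers working while the storage moves to env_uuid; a small hedged sketch (UUID made up):

    task = objects.Task.get("<task-uuid>")
    task["deployment_uuid"] == task["env_uuid"]   # both read the env_uuid column now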
@staticmethod
@@ -319,8 +321,8 @@ class Task(object):

def to_dict(self):
db_task = self.task
deployment_name = db.deployment_get(
self.task["deployment_uuid"])["name"]
deployment_name = db.env_get(self.task["env_uuid"])["name"]
db_task["deployment_uuid"] = db_task["env_uuid"]
db_task["deployment_name"] = deployment_name
self._serialize_dt(db_task)
for subtask in db_task.get("subtasks", []):
@@ -340,7 +342,7 @@ class Task(object):
@staticmethod
def list(status=None, deployment=None, tags=None):
return [Task(db_task) for db_task in db.task_list(
status, deployment=deployment, tags=tags)]
status, env=deployment, tags=tags)]

@staticmethod
def delete_by_uuid(uuid, status=None):
@@ -28,6 +28,7 @@ class Verification(object):
in the database
"""
self._db_entry = verification
self._db_entry["deployment_uuid"] = self._db_entry["env_uuid"]

def __getattr__(self, attr):
return self._db_entry[attr]
@@ -38,7 +39,7 @@ class Verification(object):
def to_dict(self, item=None):
data = {}
formatters = ["created_at", "updated_at"]
fields = ["deployment_uuid", "verifier_uuid", "uuid", "id",
fields = ["deployment_uuid", "env_uuid", "verifier_uuid", "uuid", "id",
"unexpected_success", "status", "tests", "skipped",
"tags", "tests_duration", "run_args", "success",
"expected_failures", "tests_count", "failures"]
@@ -15,7 +15,6 @@

import abc

import jsonschema
import six

from rally.common.plugin import plugin
@@ -59,35 +58,3 @@ class Credential(plugin.Plugin):
@classmethod
def get_validation_context(cls):
return {}

def configure_builder(platform):
def wrapper(cls):
cls = plugin.configure(name="credential_builder",
platform=platform)(cls)
return cls
return wrapper

def get_builder(platform):
return CredentialBuilder.get(name="credential_builder",
platform=platform)

@plugin.base()
@six.add_metaclass(abc.ABCMeta)
class CredentialBuilder(plugin.Plugin):
"""Base class for extensions of ExistingCloud deployment."""

CONFIG_SCHEMA = {"type": "null"}

def __init__(self, config):
self.config = config

@classmethod
def validate(cls, config):
jsonschema.validate(config, cls.CONFIG_SCHEMA)

@abc.abstractmethod
def build_credentials(self):
"""Builds credentials from provided configuration"""
@ -1,136 +0,0 @@
|
||||
# Copyright 2013: Mirantis Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import abc
|
||||
|
||||
import jsonschema
|
||||
import six
|
||||
|
||||
from rally.common import logging
|
||||
from rally.common.plugin import plugin
|
||||
from rally import consts
|
||||
from rally import exceptions
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
configure = plugin.configure
|
||||
|
||||
|
||||
@plugin.base()
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class Engine(plugin.Plugin):
|
||||
"""Base class of all deployment engines.
|
||||
|
||||
It's a base class with self-discovery of subclasses. Each subclass
|
||||
has to implement deploy() and cleanup() methods. By default, each engine
|
||||
located as a submodule of the package rally.deployment.engines is
|
||||
auto-discovered.
|
||||
|
||||
Example of usage with a simple engine:
|
||||
|
||||
# Add new engine with __name__ == "A"
|
||||
class A(Engine):
|
||||
def __init__(self, deployment):
|
||||
# do something
|
||||
|
||||
def deploy(self):
|
||||
# Make a deployment and return OpenStack credentials.
|
||||
# The credentials may have either admin or ordinary users
|
||||
# permissions (depending on how the deploy engine has been
|
||||
# initialized).
|
||||
return [credential_1, credential_2, ...]
|
||||
|
||||
def cleanup(self):
|
||||
# Destroy OpenStack deployment and free resource
|
||||
|
||||
An instance of this class used as a context manager on any unsafe
|
||||
operations to a deployment. Any unhandled exceptions bring a status
|
||||
of the deployment to the inconsistent state.
|
||||
|
||||
with Engine.get_engine("A", deployment) as deploy:
|
||||
# deploy is an instance of the A engine
|
||||
# perform all usage operations on your cloud
|
||||
"""
|
||||
def __init__(self, deployment):
|
||||
self.deployment = deployment
|
||||
|
||||
@property
|
||||
def config(self):
|
||||
return self.deployment["config"]
|
||||
|
||||
def validate(self, config=None):
|
||||
# TODO(sskripnick): remove this checking when config schema
|
||||
# is done for all available engines
|
||||
if hasattr(self, "CONFIG_SCHEMA"):
|
||||
jsonschema.validate(config or self.config, self.CONFIG_SCHEMA)
|
||||
|
||||
# FIXME(boris-42): Get rid of this method
|
||||
@staticmethod
|
||||
def get_engine(name, deployment):
|
||||
"""Returns instance of a deploy engine with corresponding name."""
|
||||
try:
|
||||
engine_cls = Engine.get(name)
|
||||
return engine_cls(deployment)
|
||||
except exceptions.PluginNotFound:
|
||||
LOG.error(
|
||||
"Deployment %(uuid)s: Plugin %(name)s doesn't exist."
|
||||
% {"uuid": deployment["uuid"], "name": name})
|
||||
deployment.update_status(consts.DeployStatus.DEPLOY_FAILED)
|
||||
raise
|
||||
|
||||
@abc.abstractmethod
|
||||
def deploy(self):
|
||||
"""Deploy OpenStack cloud and return credentials."""
|
||||
|
||||
@abc.abstractmethod
|
||||
def cleanup(self):
|
||||
"""Cleanup OpenStack deployment."""
|
||||
|
||||
@logging.log_deploy_wrapper(LOG.info, "OpenStack cloud deployment.")
|
||||
def make_deploy(self):
|
||||
self.deployment.set_started()
|
||||
credentials = self.deploy()
|
||||
self.deployment.set_completed()
|
||||
return credentials
|
||||
|
||||
@logging.log_deploy_wrapper(LOG.info,
|
||||
"Destroy cloud and free allocated resources.")
|
||||
def make_cleanup(self):
|
||||
self.deployment.update_status(consts.DeployStatus.CLEANUP_STARTED)
|
||||
self.cleanup()
|
||||
self.deployment.update_status(consts.DeployStatus.CLEANUP_FINISHED)
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, exc_traceback):
|
||||
if exc_type is not None:
|
||||
exc_info = None
|
||||
if not issubclass(exc_type, exceptions.InvalidArgumentsException):
|
||||
exc_info = (exc_type, exc_value, exc_traceback)
|
||||
LOG.error("Deployment %s: Error has occurred in context "
|
||||
"of the deployment" % self.deployment["uuid"],
|
||||
exc_info=exc_info)
|
||||
status = self.deployment["status"]
|
||||
if status in (consts.DeployStatus.DEPLOY_INIT,
|
||||
consts.DeployStatus.DEPLOY_STARTED):
|
||||
self.deployment.update_status(
|
||||
consts.DeployStatus.DEPLOY_FAILED)
|
||||
elif status == consts.DeployStatus.DEPLOY_FINISHED:
|
||||
self.deployment.update_status(
|
||||
consts.DeployStatus.DEPLOY_INCONSISTENT)
|
||||
elif status == consts.DeployStatus.CLEANUP_STARTED:
|
||||
self.deployment.update_status(
|
||||
consts.DeployStatus.CLEANUP_FAILED)
|
@ -1,260 +0,0 @@
|
||||
# Copyright 2013: Mirantis Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from rally.common import logging
|
||||
from rally import consts
|
||||
from rally.deployment import credential
|
||||
from rally.deployment import engine
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@engine.configure(name="ExistingCloud")
|
||||
class ExistingCloud(engine.Engine):
|
||||
"""Platform independent deployment engine.
|
||||
|
||||
This deployment engine allows specifing list of credentials for one
|
||||
or more platforms.
|
||||
|
||||
Example configuration:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"type": "ExistingCloud",
|
||||
"creds": {
|
||||
"openstack": {
|
||||
"auth_url": "http://localhost:5000/v3/",
|
||||
"region_name": "RegionOne",
|
||||
"endpoint_type": "public",
|
||||
"admin": {
|
||||
"username": "admin",
|
||||
"password": "admin",
|
||||
"user_domain_name": "admin",
|
||||
"project_name": "admin",
|
||||
"project_domain_name": "admin",
|
||||
},
|
||||
"https_insecure": False,
|
||||
"https_cacert": "",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
To specify extra options use can use special "extra" parameter:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"type": "ExistingCloud",
|
||||
...
|
||||
"extra": {"some_var": "some_value"}
|
||||
}
|
||||
|
||||
It also support deprecated version of configuration that supports
|
||||
only OpenStack.
|
||||
|
||||
keystone v2:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"type": "ExistingCloud",
|
||||
"auth_url": "http://localhost:5000/v2.0/",
|
||||
"region_name": "RegionOne",
|
||||
"endpoint_type": "public",
|
||||
"admin": {
|
||||
"username": "admin",
|
||||
"password": "password",
|
||||
"tenant_name": "demo"
|
||||
},
|
||||
"https_insecure": False,
|
||||
"https_cacert": "",
|
||||
}
|
||||
|
||||
keystone v3 API endpoint:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"type": "ExistingCloud",
|
||||
"auth_url": "http://localhost:5000/v3/",
|
||||
"region_name": "RegionOne",
|
||||
"endpoint_type": "public",
|
||||
"admin": {
|
||||
"username": "admin",
|
||||
"password": "admin",
|
||||
"user_domain_name": "admin",
|
||||
"project_name": "admin",
|
||||
"project_domain_name": "admin",
|
||||
},
|
||||
"https_insecure": False,
|
||||
"https_cacert": "",
|
||||
}
|
||||
|
||||
"""
|
||||
|
||||
USER_SCHEMA = {
|
||||
"type": "object",
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "Keystone V2.0",
|
||||
"properties": {
|
||||
"username": {"type": "string"},
|
||||
"password": {"type": "string"},
|
||||
"tenant_name": {"type": "string"},
|
||||
},
|
||||
"required": ["username", "password", "tenant_name"],
|
||||
"additionalProperties": False
|
||||
},
|
||||
{
|
||||
"description": "Keystone V3.0",
|
||||
"properties": {
|
||||
"username": {"type": "string"},
|
||||
"password": {"type": "string"},
|
||||
"domain_name": {"type": "string"},
|
||||
"user_domain_name": {"type": "string"},
|
||||
"project_name": {"type": "string"},
|
||||
"project_domain_name": {"type": "string"},
|
||||
},
|
||||
"required": ["username", "password", "project_name"],
|
||||
"additionalProperties": False
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
OLD_CONFIG_SCHEMA = {
|
||||
"type": "object",
|
||||
"description": "Deprecated schema (openstack only)",
|
||||
"properties": {
|
||||
"type": {"type": "string"},
|
||||
"auth_url": {"type": "string"},
|
||||
"region_name": {"type": "string"},
|
||||
# NOTE(andreykurilin): it looks like we do not use endpoint
|
||||
# var at all
|
||||
"endpoint": {"type": ["string", "null"]},
|
||||
"endpoint_type": {"enum": [consts.EndpointType.ADMIN,
|
||||
consts.EndpointType.INTERNAL,
|
||||
consts.EndpointType.PUBLIC,
|
||||
None]},
|
||||
"https_insecure": {"type": "boolean"},
|
||||
"https_cacert": {"type": "string"},
|
||||
"profiler_hmac_key": {"type": ["string", "null"]},
|
||||
"profiler_conn_str": {"type": ["string", "null"]},
|
||||
"admin": USER_SCHEMA,
|
||||
"users": {"type": "array", "items": USER_SCHEMA, "minItems": 1},
|
||||
"extra": {"type": "object", "additionalProperties": True}
|
||||
},
|
||||
"anyOf": [
|
||||
{"description": "The case when the admin is specified and the "
|
||||
"users can be created via 'users' context or "
|
||||
"'existing_users' will be used.",
|
||||
"required": ["type", "auth_url", "admin"]},
|
||||
{"description": "The case when the only existing users are "
|
||||
"specified.",
|
||||
"required": ["type", "auth_url", "users"]}
|
||||
],
|
||||
"additionalProperties": False
|
||||
}
|
||||
|
||||
NEW_CONFIG_SCHEMA = {
|
||||
"type": "object",
|
||||
"description": "New schema for multiplatform deployment",
|
||||
"properties": {
|
||||
"type": {"enum": ["ExistingCloud"]},
|
||||
"creds": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^[a-z0-9_-]+$": {
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "Single credential",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "List of credentials",
|
||||
"type": "array",
|
||||
"items": {"type": "object"},
|
||||
"maxItems": 1,
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"extra": {"type": "object", "additionalProperties": True}
|
||||
},
|
||||
"required": ["type", "creds"],
|
||||
"additionalProperties": False
|
||||
}
|
||||
|
||||
CONFIG_SCHEMA = {"type": "object",
|
||||
"oneOf": [OLD_CONFIG_SCHEMA, NEW_CONFIG_SCHEMA]}
|
||||
|
||||
def validate(self, config=None):
|
||||
config = config or self.config
|
||||
super(ExistingCloud, self).validate(config)
|
||||
|
||||
creds_config = self._get_creds(config)
|
||||
for platform, config in creds_config.items():
|
||||
builder_cls = credential.get_builder(platform)
|
||||
for creds in config:
|
||||
builder_cls.validate(creds)
|
||||
|
||||
def _get_creds(self, config):
|
||||
# NOTE(astudenov): copy config to prevent compatibility changes
|
||||
# from saving to database
|
||||
config = copy.deepcopy(config)
|
||||
if "creds" not in config:
|
||||
# backward compatibility with old schema
|
||||
del config["type"]
|
||||
creds_config = {"openstack": [config]}
|
||||
else:
|
||||
creds_config = config["creds"]
|
||||
|
||||
# convert all credentials to list
|
||||
for platform, config in creds_config.items():
|
||||
if isinstance(config, dict):
|
||||
creds_config[platform] = [config]
|
||||
return creds_config
|
||||
|
||||
def make_deploy(self):
|
||||
platforms = (["openstack"] if "creds" not in self.config
|
||||
else self.config["creds"].keys())
|
||||
LOG.info("Save deployment '%(name)s' (uuid=%(uuid)s) with "
|
||||
"'%(platforms)s' platform%(plural)s." %
|
||||
{"name": self.deployment["name"],
|
||||
"uuid": self.deployment["uuid"],
|
||||
"platforms": "', '".join(platforms),
|
||||
"plural": "s" if len(platforms) > 1 else ""})
|
||||
self.deployment.set_started()
|
||||
credentials = self.deploy()
|
||||
self.deployment.set_completed()
|
||||
return credentials
|
||||
|
||||
def deploy(self):
|
||||
creds_config = self._get_creds(self.config)
|
||||
parsed_credentials = {}
|
||||
for platform, config in creds_config.items():
|
||||
builder_cls = credential.get_builder(platform)
|
||||
credentials = []
|
||||
for creds in config:
|
||||
builder = builder_cls(creds)
|
||||
credentials.append(builder.build_credentials())
|
||||
parsed_credentials[platform] = credentials
|
||||
return parsed_credentials
|
||||
|
||||
def cleanup(self):
|
||||
pass
|
14
rally/env/env_mgr.py
vendored
14
rally/env/env_mgr.py
vendored
@ -292,8 +292,8 @@ class EnvManager(object):
|
||||
p.platform_data, p.plugin_data = platform_data, plugin_data
|
||||
try:
|
||||
p.destroy()
|
||||
LOG.warrning("Couldn't store platform %s data to DB."
|
||||
"Attempt to destroy it succeeded." % p.uuid)
|
||||
LOG.warning("Couldn't store platform %s data to DB."
|
||||
"Attempt to destroy it succeeded." % p.uuid)
|
||||
except Exception:
|
||||
LOG.exception(
|
||||
"Couldn't store data of platform(%(uuid)s): %(name)s "
|
||||
@ -509,7 +509,7 @@ class EnvManager(object):
|
||||
def destroy(self, skip_cleanup=False):
|
||||
"""Destroys all platforms related to env.
|
||||
|
||||
:param skip_cleanup: By default, before destroying plaform it's cleaned
|
||||
:param skip_cleanup: Skip cleaning up platform resources
|
||||
"""
|
||||
cleanup_info = {"skipped": True}
|
||||
if not skip_cleanup:
|
||||
@ -569,6 +569,12 @@ class EnvManager(object):
|
||||
platforms[name]["message"] = "Successfully destroyed"
|
||||
platforms[name]["status"]["new"] = platform.STATUS.DESTROYED
|
||||
|
||||
from rally.common import objects
|
||||
|
||||
for verifier in objects.Verifier.list():
|
||||
verifier.set_deployment(self.uuid)
|
||||
verifier.manager.uninstall()
|
||||
|
||||
db.env_set_status(self.uuid, STATUS.DESTROYING, new_env_status)
|
||||
|
||||
return result
|
||||
@ -578,7 +584,7 @@ class EnvManager(object):
|
||||
|
||||
It deletes all Task and Verify results related to this env as well.
|
||||
|
||||
:param Force: Use it if you don't want to perform status check
|
||||
:param force: Use it if you don't want to perform status check
|
||||
"""
|
||||
_status = self.status
|
||||
if not force and _status != STATUS.DESTROYED:
|
||||
|
@ -100,105 +100,3 @@ class OpenStackCredential(credential.Credential):
|
||||
def clients(self, api_info=None):
|
||||
return osclients.Clients(self, api_info=api_info,
|
||||
cache=self._clients_cache)
|
||||
|
||||
|
||||
@credential.configure_builder("openstack")
|
||||
class OpenStackCredentialBuilder(credential.CredentialBuilder):
|
||||
"""Builds credentials provided by ExistingCloud config."""
|
||||
|
||||
USER_SCHEMA = {
|
||||
"type": "object",
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "Keystone V2.0",
|
||||
"properties": {
|
||||
"username": {"type": "string"},
|
||||
"password": {"type": "string"},
|
||||
"tenant_name": {"type": "string"},
|
||||
},
|
||||
"required": ["username", "password", "tenant_name"],
|
||||
"additionalProperties": False
|
||||
},
|
||||
{
|
||||
"description": "Keystone V3.0",
|
||||
"properties": {
|
||||
"username": {"type": "string"},
|
||||
"password": {"type": "string"},
|
||||
"domain_name": {"type": "string"},
|
||||
"user_domain_name": {"type": "string"},
|
||||
"project_name": {"type": "string"},
|
||||
"project_domain_name": {"type": "string"},
|
||||
},
|
||||
"required": ["username", "password", "project_name"],
|
||||
"additionalProperties": False
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
CONFIG_SCHEMA = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"admin": USER_SCHEMA,
|
||||
"users": {"type": "array", "items": USER_SCHEMA, "minItems": 1},
|
||||
"auth_url": {"type": "string"},
|
||||
"region_name": {"type": "string"},
|
||||
# NOTE(andreykurilin): it looks like we do not use endpoint
|
||||
# var at all
|
||||
"endpoint": {"type": ["string", "null"]},
|
||||
"endpoint_type": {
|
||||
"enum": [consts.EndpointType.ADMIN,
|
||||
consts.EndpointType.INTERNAL,
|
||||
consts.EndpointType.PUBLIC,
|
||||
None]},
|
||||
"https_insecure": {"type": "boolean"},
|
||||
"https_cacert": {"type": "string"},
|
||||
"profiler_hmac_key": {"type": ["string", "null"]},
|
||||
"profiler_conn_str": {"type": ["string", "null"]}
|
||||
},
|
||||
"anyOf": [
|
||||
{"description": "The case when the admin is specified and the "
|
||||
"users can be created via 'users' context or "
|
||||
"'existing_users' will be used.",
|
||||
"required": ["auth_url", "admin"]},
|
||||
{"description": "The case when the only existing users are "
|
||||
"specified.",
|
||||
"required": ["auth_url", "users"]}
|
||||
],
|
||||
"additionalProperties": False
|
||||
}
|
||||
|
||||
def _create_credential(self, common, user, permission):
|
||||
cred = OpenStackCredential(
|
||||
auth_url=common["auth_url"],
|
||||
username=user["username"],
|
||||
password=user["password"],
|
||||
tenant_name=user.get("project_name", user.get("tenant_name")),
|
||||
permission=permission,
|
||||
region_name=common.get("region_name"),
|
||||
endpoint_type=common.get("endpoint_type"),
|
||||
endpoint=common.get("endpoint"),
|
||||
domain_name=user.get("domain_name"),
|
||||
user_domain_name=user.get("user_domain_name", None),
|
||||
project_domain_name=user.get("project_domain_name", None),
|
||||
https_insecure=common.get("https_insecure", False),
|
||||
https_cacert=common.get("https_cacert"),
|
||||
profiler_hmac_key=common.get("profiler_hmac_key"),
|
||||
profiler_conn_str=common.get("profiler_conn_str"))
|
||||
return cred.to_dict()
|
||||
|
||||
def build_credentials(self):
|
||||
permissions = consts.EndpointPermission
|
||||
|
||||
users = [self._create_credential(self.config, user, permissions.USER)
|
||||
for user in self.config.get("users", [])]
|
||||
|
||||
admin = None
|
||||
if self.config.get("admin"):
|
||||
admin = self._create_credential(self.config,
|
||||
self.config.get("admin"),
|
||||
permissions.ADMIN)
|
||||
|
||||
return {"admin": admin, "users": users}
|
||||
|
||||
|
||||
# NOTE(astudenov): Let's consider moving rally.osclients here
|
||||
|
@ -70,7 +70,7 @@ class TestTaskSamples(unittest.TestCase):
|
||||
user_ctx.setup()
|
||||
self.addCleanup(user_ctx.cleanup)
|
||||
|
||||
os_creds = deployment["config"]["creds"]["openstack"]
|
||||
os_creds = deployment["config"]["openstack"]
|
||||
|
||||
user = copy.copy(os_creds["admin"])
|
||||
user["username"] = ctx["users"][0]["credential"].username
|
||||
|
@ -109,27 +109,6 @@ class DeploymentTestCase(unittest.TestCase):
|
||||
self.fail("rally deployment fails to raise error for wrong"
|
||||
" authentication info")
|
||||
|
||||
def test_recreate(self):
|
||||
rally = utils.Rally()
|
||||
rally.env.update(TEST_ENV)
|
||||
rally("deployment create --name t_create_env --fromenv")
|
||||
rally("deployment recreate --deployment t_create_env")
|
||||
self.assertIn("t_create_env", rally("deployment list"))
|
||||
|
||||
def test_recreate_from_file(self):
|
||||
rally = utils.Rally()
|
||||
rally.env.update(TEST_ENV)
|
||||
rally("deployment create --name t_create_env --fromenv")
|
||||
config = json.loads(rally("deployment config"))
|
||||
config["openstack"]["auth_url"] = "http://foo/"
|
||||
file = utils.JsonTempFile(config)
|
||||
rally("deployment recreate --deployment t_create_env "
|
||||
"--filename %s" % file.filename)
|
||||
self.assertIn("t_create_env", rally("deployment list"))
|
||||
self.assertEqual(config,
|
||||
json.loads(rally("deployment config")))
|
||||
self.assertIn("http://foo/", rally("deployment show"))
|
||||
|
||||
def test_use(self):
|
||||
rally = utils.Rally()
|
||||
rally.env.update(TEST_ENV)
|
||||
|
@ -15,7 +15,6 @@
|
||||
|
||||
"""Tests for db.api layer."""
|
||||
|
||||
import copy
|
||||
import datetime as dt
|
||||
|
||||
import mock
|
||||
@ -40,7 +39,7 @@ class ConnectionTestCase(test.DBTestCase):
|
||||
class TasksTestCase(test.DBTestCase):
|
||||
def setUp(self):
|
||||
super(TasksTestCase, self).setUp()
|
||||
self.deploy = db.deployment_create({})
|
||||
self.env = db.env_create(self.id(), "INIT", "", {}, {}, {})
|
||||
|
||||
def _get_task(self, uuid):
|
||||
return db.task_get(uuid)
|
||||
@ -50,8 +49,8 @@ class TasksTestCase(test.DBTestCase):
|
||||
|
||||
def _create_task(self, values=None):
|
||||
values = values or {}
|
||||
if "deployment_uuid" not in values:
|
||||
values["deployment_uuid"] = self.deploy["uuid"]
|
||||
if "env_uuid" not in values:
|
||||
values["env_uuid"] = self.env["uuid"]
|
||||
return db.task_create(values)
|
||||
|
||||
def test_task_get_not_found(self):
|
||||
@ -127,13 +126,13 @@ class TasksTestCase(test.DBTestCase):
|
||||
FINISHED = consts.TaskStatus.FINISHED
|
||||
task_finished = sorted(self._create_task(
|
||||
{"status": FINISHED,
|
||||
"deployment_uuid": self.deploy["uuid"]}
|
||||
"env_uuid": self.env["uuid"]}
|
||||
)["uuid"] for i in moves.range(3))
|
||||
|
||||
task_all = sorted(task_init + task_finished)
|
||||
|
||||
def get_uuids(status=None, deployment=None):
|
||||
tasks = db.task_list(status=status, deployment=deployment)
|
||||
def get_uuids(status=None, env=None):
|
||||
tasks = db.task_list(status=status, env=env)
|
||||
return sorted(task["uuid"] for task in tasks)
|
||||
|
||||
self.assertEqual(task_all, get_uuids(None))
|
||||
@ -141,7 +140,7 @@ class TasksTestCase(test.DBTestCase):
|
||||
self.assertEqual(task_init, get_uuids(status=INIT))
|
||||
self.assertEqual(task_finished, get_uuids(status=FINISHED))
|
||||
self.assertRaises(exceptions.DBRecordNotFound,
|
||||
get_uuids, deployment="non-existing-deployment")
|
||||
get_uuids, env="non-existing-env")
|
||||
|
||||
deleted_task_uuid = task_finished.pop()
|
||||
db.task_delete(deleted_task_uuid)
|
||||
@ -384,8 +383,8 @@ class TasksTestCase(test.DBTestCase):
|
||||
class SubtaskTestCase(test.DBTestCase):
|
||||
def setUp(self):
|
||||
super(SubtaskTestCase, self).setUp()
|
||||
self.deploy = db.deployment_create({})
|
||||
self.task = db.task_create({"deployment_uuid": self.deploy["uuid"]})
|
||||
self.env = db.env_create(self.id(), "INIT", "", {}, {}, {})
|
||||
self.task = db.task_create({"env_uuid": self.env["uuid"]})
|
||||
|
||||
def test_subtask_create(self):
|
||||
subtask = db.subtask_create(self.task["uuid"], title="foo")
|
||||
@ -404,8 +403,8 @@ class SubtaskTestCase(test.DBTestCase):
|
||||
class WorkloadTestCase(test.DBTestCase):
|
||||
def setUp(self):
|
||||
super(WorkloadTestCase, self).setUp()
|
||||
self.deploy = db.deployment_create({})
|
||||
self.task = db.task_create({"deployment_uuid": self.deploy["uuid"]})
|
||||
self.env = db.env_create(self.id(), "INIT", "", {}, {}, {})
|
||||
self.task = db.task_create({"env_uuid": self.env["uuid"]})
|
||||
self.task_uuid = self.task["uuid"]
|
||||
self.subtask = db.subtask_create(self.task_uuid, title="foo")
|
||||
self.subtask_uuid = self.subtask["uuid"]
|
||||
@ -558,8 +557,8 @@ class WorkloadTestCase(test.DBTestCase):
|
||||
class WorkloadDataTestCase(test.DBTestCase):
|
||||
def setUp(self):
|
||||
super(WorkloadDataTestCase, self).setUp()
|
||||
self.deploy = db.deployment_create({})
|
||||
self.task = db.task_create({"deployment_uuid": self.deploy["uuid"]})
|
||||
self.env = db.env_create(self.id(), "INIT", "", {}, {}, {})
|
||||
self.task = db.task_create({"env_uuid": self.env["uuid"]})
|
||||
self.task_uuid = self.task["uuid"]
|
||||
self.subtask = db.subtask_create(self.task_uuid, title="foo")
|
||||
self.subtask_uuid = self.subtask["uuid"]
|
||||
@ -820,226 +819,6 @@ class PlatformTestCase(test.DBTestCase):
|
||||
self.assertEqual({"platform": "data2"}, in_db["platform_data"])
|
||||
|
||||
|
||||
class DeploymentTestCase(test.DBTestCase):
|
||||
def test_deployment_create(self):
|
||||
deploy = db.deployment_create({"config": {"opt": "val"}})
|
||||
deploys = db.deployment_list()
|
||||
self.assertEqual(1, len(deploys))
|
||||
self.assertEqual(deploys[0]["uuid"], deploy["uuid"])
|
||||
self.assertEqual(consts.DeployStatus.DEPLOY_INIT, deploy["status"])
|
||||
self.assertEqual({"opt": "val"}, deploy["config"])
|
||||
self.assertEqual({}, deploy["credentials"])
|
||||
|
||||
def test_deployment_create_several(self):
|
||||
# Create a deployment
|
||||
deploys = db.deployment_list()
|
||||
self.assertEqual(0, len(deploys))
|
||||
deploy_one = db.deployment_create({"config": {"opt1": "val1"}})
|
||||
deploys = db.deployment_list()
|
||||
self.assertEqual(1, len(deploys))
|
||||
self.assertEqual(deploys[0]["uuid"], deploy_one["uuid"])
|
||||
self.assertEqual(consts.DeployStatus.DEPLOY_INIT, deploy_one["status"])
|
||||
self.assertEqual({"opt1": "val1"}, deploy_one["config"])
|
||||
|
||||
# Create another deployment and sure that they are different
|
||||
deploy_two = db.deployment_create({"config": {"opt2": "val2"}})
|
||||
deploys = db.deployment_list()
|
||||
self.assertEqual(2, len(deploys))
|
||||
self.assertEqual(set([deploy_one["uuid"], deploy_two["uuid"]]),
|
||||
set([deploy["uuid"] for deploy in deploys]))
|
||||
self.assertNotEqual(deploy_one["uuid"], deploy_two["uuid"])
|
||||
self.assertEqual(consts.DeployStatus.DEPLOY_INIT, deploy_two["status"])
|
||||
self.assertEqual({"opt2": "val2"}, deploy_two["config"])
|
||||
|
||||
def test_deployment_update(self):
|
||||
credentials = {
|
||||
"openstack": [{"admin": {"foo": "bar"}, "users": ["foo_user"]}]}
|
||||
deploy = db.deployment_create({})
|
||||
self.assertEqual({}, deploy["config"])
|
||||
self.assertEqual({}, deploy["credentials"])
|
||||
update_deploy = db.deployment_update(
|
||||
deploy["uuid"], {"config": {"opt": "val"},
|
||||
"credentials": copy.deepcopy(credentials)})
|
||||
self.assertEqual(deploy["uuid"], update_deploy["uuid"])
|
||||
self.assertEqual({"opt": "val"}, update_deploy["config"])
|
||||
self.assertEqual(credentials, update_deploy["credentials"])
|
||||
get_deploy = db.deployment_get(deploy["uuid"])
|
||||
self.assertEqual(deploy["uuid"], get_deploy["uuid"])
|
||||
self.assertEqual({"opt": "val"}, get_deploy["config"])
|
||||
self.assertEqual(credentials, update_deploy["credentials"])
|
||||
|
||||
def test_deployment_update_several(self):
|
||||
# Create a deployment and update it
|
||||
deploy_one = db.deployment_create({})
|
||||
self.assertEqual({}, deploy_one["config"])
|
||||
update_deploy_one = db.deployment_update(
|
||||
deploy_one["uuid"], {"config": {"opt1": "val1"}})
|
||||
self.assertEqual(deploy_one["uuid"], update_deploy_one["uuid"])
|
||||
self.assertEqual({"opt1": "val1"}, update_deploy_one["config"])
|
||||
get_deploy_one = db.deployment_get(deploy_one["uuid"])
|
||||
self.assertEqual(deploy_one["uuid"], get_deploy_one["uuid"])
|
||||
self.assertEqual({"opt1": "val1"}, get_deploy_one["config"])
|
||||
|
||||
# Create another deployment
|
||||
deploy_two = db.deployment_create({})
|
||||
update_deploy_two = db.deployment_update(
|
||||
deploy_two["uuid"], {"config": {"opt2": "val2"}})
|
||||
self.assertEqual(deploy_two["uuid"], update_deploy_two["uuid"])
|
||||
self.assertEqual({"opt2": "val2"}, update_deploy_two["config"])
|
||||
get_deploy_one_again = db.deployment_get(deploy_one["uuid"])
|
||||
self.assertEqual(deploy_one["uuid"], get_deploy_one_again["uuid"])
|
||||
self.assertEqual({"opt1": "val1"}, get_deploy_one_again["config"])
|
||||
|
||||
def test_deployment_get(self):
|
||||
deploy_one = db.deployment_create({"config": {"opt1": "val1"}})
|
||||
deploy_two = db.deployment_create({"config": {"opt2": "val2"}})
|
||||
get_deploy_one = db.deployment_get(deploy_one["uuid"])
|
||||
get_deploy_two = db.deployment_get(deploy_two["uuid"])
|
||||
self.assertNotEqual(get_deploy_one["uuid"], get_deploy_two["uuid"])
|
||||
self.assertEqual({"opt1": "val1"}, get_deploy_one["config"])
|
||||
self.assertEqual({"opt2": "val2"}, get_deploy_two["config"])
|
||||
|
||||
def test_deployment_get_not_found(self):
|
||||
self.assertRaises(exceptions.DBRecordNotFound,
|
||||
db.deployment_get,
|
||||
"852e932b-9552-4b2d-89e3-a5915780a5e3")
|
||||
|
||||
def test_deployment_list(self):
|
||||
deploy_one = db.deployment_create({})
|
||||
deploy_two = db.deployment_create({})
|
||||
deploys = db.deployment_list()
|
||||
self.assertEqual(sorted([deploy_one["uuid"], deploy_two["uuid"]]),
|
||||
sorted([deploy["uuid"] for deploy in deploys]))
|
||||
|
||||
def test_deployment_list_with_status_and_name(self):
|
||||
deploy_one = db.deployment_create({})
|
||||
deploy_two = db.deployment_create({
|
||||
"config": {},
|
||||
"status": consts.DeployStatus.DEPLOY_FAILED,
|
||||
})
|
||||
deploy_three = db.deployment_create({"name": "deployment_name"})
|
||||
deploys = db.deployment_list(status=consts.DeployStatus.DEPLOY_INIT)
|
||||
deploys.sort(key=lambda x: x["id"])
|
||||
self.assertEqual(2, len(deploys))
|
||||
self.assertEqual(deploy_one["uuid"], deploys[0]["uuid"])
|
||||
deploys = db.deployment_list(status=consts.DeployStatus.DEPLOY_FAILED)
|
||||
self.assertEqual(1, len(deploys))
|
||||
self.assertEqual(deploy_two["uuid"], deploys[0]["uuid"])
|
||||
deploys = db.deployment_list(
|
||||
status=consts.DeployStatus.DEPLOY_FINISHED)
|
||||
self.assertEqual(0, len(deploys))
|
||||
deploys = db.deployment_list(name="deployment_name")
|
||||
self.assertEqual(deploy_three["uuid"], deploys[0]["uuid"])
|
||||
self.assertEqual(1, len(deploys))
|
||||
|
||||
def test_deployment_list_parent(self):
|
||||
deploy = db.deployment_create({})
|
||||
subdeploy1 = db.deployment_create({"parent_uuid": deploy["uuid"]})
|
||||
subdeploy2 = db.deployment_create({"parent_uuid": deploy["uuid"]})
|
||||
self.assertEqual(
|
||||
[deploy["uuid"]], [d["uuid"] for d in db.deployment_list()])
|
||||
subdeploys = db.deployment_list(parent_uuid=deploy["uuid"])
|
||||
self.assertEqual(set([subdeploy1["uuid"], subdeploy2["uuid"]]),
|
||||
set([d["uuid"] for d in subdeploys]))
|
||||
|
||||
def test_deployment_delete(self):
|
||||
deploy_one = db.deployment_create({})
|
||||
deploy_two = db.deployment_create({})
|
||||
db.deployment_delete(deploy_two["uuid"])
|
||||
deploys = db.deployment_list()
|
||||
self.assertEqual(1, len(deploys))
|
||||
self.assertEqual(deploy_one["uuid"], deploys[0]["uuid"])
|
||||
|
||||
def test_deployment_delete_not_found(self):
|
||||
self.assertRaises(exceptions.DBRecordNotFound,
|
||||
db.deployment_delete,
|
||||
"5f2883be-46c8-4c4b-a4fe-988ad0c6b20a")
|
||||
|
||||
def test_deployment_delete_is_busy(self):
|
||||
deployment = db.deployment_create({})
|
||||
db.resource_create({"deployment_uuid": deployment["uuid"]})
|
||||
db.resource_create({"deployment_uuid": deployment["uuid"]})
|
||||
self.assertRaises(exceptions.DBConflict,
|
||||
db.deployment_delete, deployment["uuid"])
|
||||
|
||||
|
||||
class ResourceTestCase(test.DBTestCase):
|
||||
def test_create(self):
|
||||
deployment = db.deployment_create({})
|
||||
resource = db.resource_create({
|
||||
"deployment_uuid": deployment["uuid"],
|
||||
"provider_name": "fakeprovider",
|
||||
"type": "faketype",
|
||||
})
|
||||
resources = db.resource_get_all(deployment["uuid"])
|
||||
self.assertTrue(resource["id"])
|
||||
self.assertEqual(1, len(resources))
|
||||
self.assertTrue(resources[0]["id"], resource["id"])
|
||||
self.assertEqual(deployment["uuid"], resource["deployment_uuid"])
|
||||
self.assertEqual("fakeprovider", resource["provider_name"])
|
||||
self.assertEqual("faketype", resource["type"])
|
||||
|
||||
def test_delete(self):
|
||||
deployment = db.deployment_create({})
|
||||
res = db.resource_create({"deployment_uuid": deployment["uuid"]})
|
||||
db.resource_delete(res["id"])
|
||||
resources = db.resource_get_all(deployment["uuid"])
|
||||
self.assertEqual(0, len(resources))
|
||||
|
||||
def test_delete_not_found(self):
|
||||
self.assertRaises(exceptions.DBRecordNotFound,
|
||||
db.resource_delete, 123456789)
|
||||
|
||||
def test_get_all(self):
|
||||
deployment0 = db.deployment_create({})
|
||||
deployment1 = db.deployment_create({})
|
||||
res0 = db.resource_create({"deployment_uuid": deployment0["uuid"]})
|
||||
res1 = db.resource_create({"deployment_uuid": deployment1["uuid"]})
|
||||
res2 = db.resource_create({"deployment_uuid": deployment1["uuid"]})
|
||||
resources = db.resource_get_all(deployment1["uuid"])
|
||||
self.assertEqual(sorted([res1["id"], res2["id"]]),
|
||||
sorted([r["id"] for r in resources]))
|
||||
resources = db.resource_get_all(deployment0["uuid"])
|
||||
self.assertEqual(1, len(resources))
|
||||
self.assertEqual(res0["id"], resources[0]["id"])
|
||||
|
||||
def test_get_all_by_provider_name(self):
|
||||
deployment = db.deployment_create({})
|
||||
res_one = db.resource_create({
|
||||
"deployment_uuid": deployment["uuid"],
|
||||
"provider_name": "one",
|
||||
})
|
||||
res_two = db.resource_create({
|
||||
"deployment_uuid": deployment["uuid"],
|
||||
"provider_name": "two",
|
||||
})
|
||||
resources = db.resource_get_all(deployment["uuid"],
|
||||
provider_name="one")
|
||||
self.assertEqual(1, len(resources))
|
||||
self.assertEqual(res_one["id"], resources[0]["id"])
|
||||
resources = db.resource_get_all(deployment["uuid"],
|
||||
provider_name="two")
|
||||
self.assertEqual(1, len(resources))
|
||||
self.assertEqual(res_two["id"], resources[0]["id"])
|
||||
|
||||
def test_get_all_by_provider_type(self):
|
||||
deployment = db.deployment_create({})
|
||||
res_one = db.resource_create({
|
||||
"deployment_uuid": deployment["uuid"],
|
||||
"type": "one",
|
||||
})
|
||||
res_two = db.resource_create({
|
||||
"deployment_uuid": deployment["uuid"],
|
||||
"type": "two",
|
||||
})
|
||||
resources = db.resource_get_all(deployment["uuid"], type="one")
|
||||
self.assertEqual(1, len(resources))
|
||||
self.assertEqual(res_one["id"], resources[0]["id"])
|
||||
resources = db.resource_get_all(deployment["uuid"], type="two")
|
||||
self.assertEqual(1, len(resources))
|
||||
self.assertEqual(res_two["id"], resources[0]["id"])
|
||||
|
||||
|
||||
class VerifierTestCase(test.DBTestCase):
|
||||
def test_verifier_create(self):
|
||||
v = db.verifier_create("a", "b", "c", "d", "e", False)
|
||||
@ -1082,38 +861,39 @@ class VerificationTestCase(test.DBTestCase):
|
||||
super(VerificationTestCase, self).setUp()
|
||||
|
||||
self.verifier = db.verifier_create("a", "b", "c", "d", "e", False)
|
||||
self.deploy = db.deployment_create({})
|
||||
self.env = db.env_create(self.id(), "INIT", "", {}, {}, {})
|
||||
|
||||
def _create_verification(self):
|
||||
def _create_verification(self, tags=None, env_uuid=None):
|
||||
tags = tags or []
|
||||
verifier_uuid = self.verifier["uuid"]
|
||||
deployment_uuid = self.deploy["uuid"]
|
||||
return db.verification_create(verifier_uuid, deployment_uuid, [], {})
|
||||
env_uuid = env_uuid or self.env["uuid"]
|
||||
return db.verification_create(verifier_uuid, env_uuid, tags, {})
|
||||
|
||||
def test_verification_create(self):
|
||||
v = self._create_verification()
|
||||
self.assertEqual(self.verifier["uuid"], v["verifier_uuid"])
|
||||
self.assertEqual(self.deploy["uuid"], v["deployment_uuid"])
|
||||
self.assertEqual(self.env["uuid"], v["env_uuid"])
|
||||
|
||||
def test_verification_get(self):
|
||||
v = db.verification_get(self._create_verification()["uuid"])
|
||||
self.assertEqual(self.verifier["uuid"], v["verifier_uuid"])
|
||||
self.assertEqual(self.deploy["uuid"], v["deployment_uuid"])
|
||||
self.assertEqual(self.env["uuid"], v["env_uuid"])
|
||||
|
||||
def test_verification_get_raise_exc(self):
|
||||
self.assertRaises(exceptions.DBRecordNotFound, db.verification_get,
|
||||
"1234")
|
||||
|
||||
def test_verification_list(self):
|
||||
deploy = db.deployment_create({})
|
||||
v1 = db.verification_create(
|
||||
self.verifier["uuid"], deploy["uuid"], ["foo", "bar"], {})
|
||||
another_env = db.env_create(self.id() + "2", "INIT", "", {}, {}, {})
|
||||
v1 = self._create_verification(tags=["foo", "bar"],
|
||||
env_uuid=another_env["uuid"])
|
||||
v2 = self._create_verification()
|
||||
|
||||
vs = db.verification_list(self.verifier["uuid"])
|
||||
self.assertEqual(sorted([v1["uuid"], v2["uuid"]]),
|
||||
sorted([v["uuid"] for v in vs]))
|
||||
|
||||
vs = db.verification_list(self.verifier["uuid"], deploy["uuid"])
|
||||
vs = db.verification_list(self.verifier["uuid"], another_env["uuid"])
|
||||
self.assertEqual(1, len(vs))
|
||||
self.assertEqual(v1["uuid"], vs[0]["uuid"])
|
||||
|
||||
|
@ -24,6 +24,7 @@ import pprint
|
||||
import uuid
|
||||
|
||||
import alembic
|
||||
import jsonschema
|
||||
import mock
|
||||
from oslo_db.sqlalchemy import test_migrations
|
||||
from oslo_db.sqlalchemy import utils as db_utils
|
||||
@ -36,7 +37,6 @@ from rally.common import db
|
||||
from rally.common.db.sqlalchemy import api
|
||||
from rally.common.db.sqlalchemy import models
|
||||
from rally import consts
|
||||
from rally.deployment.engines import existing
|
||||
from tests.unit.common.db import test_migrations_base
|
||||
from tests.unit import test as rtest
|
||||
|
||||
@ -346,6 +346,71 @@ class MigrationWalkTestCase(rtest.DBTestCase,
|
||||
"users": six.b(json.dumps([]))
|
||||
}])
|
||||
|
||||
_OLD_DEPLOYMENT_SCHEMA = {
|
||||
"type": "object",
|
||||
"description": "Deprecated schema (openstack only)",
|
||||
"properties": {
|
||||
"type": {"type": "string"},
|
||||
"auth_url": {"type": "string"},
|
||||
"region_name": {"type": "string"},
|
||||
"endpoint": {"type": ["string", "null"]},
|
||||
"endpoint_type": {"enum": [consts.EndpointType.ADMIN,
|
||||
consts.EndpointType.INTERNAL,
|
||||
consts.EndpointType.PUBLIC,
|
||||
None]},
|
||||
"https_insecure": {"type": "boolean"},
|
||||
"https_cacert": {"type": "string"},
|
||||
"profiler_hmac_key": {"type": ["string", "null"]},
|
||||
"profiler_conn_str": {"type": ["string", "null"]},
|
||||
"admin": {"$ref": "#/definitions/user"},
|
||||
"users": {"type": "array",
|
||||
"items": {"$ref": "#/definitions/user"},
|
||||
"minItems": 1},
|
||||
"extra": {"type": "object", "additionalProperties": True}
|
||||
},
|
||||
"anyOf": [
|
||||
{"description": "The case when the admin is specified and the "
|
||||
"users can be created via 'users' context or "
|
||||
"'existing_users' will be used.",
|
||||
"required": ["type", "auth_url", "admin"]},
|
||||
{"description": "The case when the only existing users are "
|
||||
"specified.",
|
||||
"required": ["type", "auth_url", "users"]}
|
||||
],
|
||||
"additionalProperties": False,
|
||||
"definitions": {
|
||||
"user": {
|
||||
"type": "object",
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "Keystone V2.0",
|
||||
"properties": {
|
||||
"username": {"type": "string"},
|
||||
"password": {"type": "string"},
|
||||
"tenant_name": {"type": "string"},
|
||||
},
|
||||
"required": ["username", "password", "tenant_name"],
|
||||
"additionalProperties": False
|
||||
},
|
||||
{
|
||||
"description": "Keystone V3.0",
|
||||
"properties": {
|
||||
"username": {"type": "string"},
|
||||
"password": {"type": "string"},
|
||||
"domain_name": {"type": "string"},
|
||||
"user_domain_name": {"type": "string"},
|
||||
"project_name": {"type": "string"},
|
||||
"project_domain_name": {"type": "string"},
|
||||
},
|
||||
"required": ["username", "password", "project_name"],
|
||||
"additionalProperties": False
|
||||
}
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
def _check_54e844ebfbc3(self, engine, data):
|
||||
self.assertEqual("54e844ebfbc3",
|
||||
api.get_backend().schema_revision(engine=engine))
|
||||
@ -377,7 +442,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
|
||||
self.assertEqual(endpoint_type,
|
||||
config["endpoint_type"])
|
||||
|
||||
existing.ExistingCloud({"config": config}).validate()
|
||||
jsonschema.validate(config, self._OLD_DEPLOYMENT_SCHEMA)
|
||||
else:
|
||||
if not deployment.uuid.startswith("should-not-be-changed"):
|
||||
self.fail("Config of deployment '%s' is not changes, "
|
||||
@ -867,7 +932,7 @@ class MigrationWalkTestCase(rtest.DBTestCase,
|
||||
self.assertEqual(endpoint_type,
|
||||
config["endpoint_type"])
|
||||
|
||||
existing.ExistingCloud({"config": config}).validate()
|
||||
jsonschema.validate(config, self._OLD_DEPLOYMENT_SCHEMA)
|
||||
else:
|
||||
if not deployment.uuid.startswith("should-not-be-changed"):
|
||||
self.fail("Config of deployment '%s' is not changes, "
|
||||
@ -2285,3 +2350,174 @@ class MigrationWalkTestCase(rtest.DBTestCase,
|
||||
conn.execute(
|
||||
deployment_table.delete().where(
|
||||
deployment_table.c.uuid == deployment_uuid))
|
||||
|
||||
def _pre_upgrade_7287df262dbc(self, engine):
|
||||
deployment_table = db_utils.get_table(engine, "deployments")
|
||||
task_table = db_utils.get_table(engine, "tasks")
|
||||
verifier_table = db_utils.get_table(engine, "verifiers")
|
||||
verification_table = db_utils.get_table(engine, "verifications")
|
||||
|
||||
self._7287df262dbc_deployments = [
|
||||
# empty config
|
||||
(str(uuid.uuid4()), {"type": "ExistingCloud", "creds": {}}),
|
||||
# OpenStack default config
|
||||
(str(uuid.uuid4()), {
|
||||
"type": "ExistingCloud",
|
||||
"creds": {
|
||||
"openstack": {
|
||||
"auth_url": "http://example.net:5000/v2.0/",
|
||||
"region_name": "RegionOne",
|
||||
"endpoint_type": "public",
|
||||
"admin": {
|
||||
"username": "admin",
|
||||
"password": "myadminpass",
|
||||
"tenant_name": "demo"
|
||||
},
|
||||
"https_insecure": False,
|
||||
"https_cacert": ""
|
||||
}
|
||||
}
|
||||
}),
|
||||
# some custom unknown thing
|
||||
(str(uuid.uuid4()), {"some_special_deployment": "foo"})
|
||||
]
|
||||
self._7287df262dbc_task_uuid = str(uuid.uuid4())
|
||||
self._7287df262dbc_verifier_uuid = str(uuid.uuid4())
|
||||
self._7287df262dbc_verification_uuid = str(uuid.uuid4())
|
||||
|
||||
with engine.connect() as conn:
|
||||
conn.execute(
|
||||
deployment_table.insert(),
|
||||
[{
|
||||
"uuid": d_uuid,
|
||||
"name": str(uuid.uuid4()),
|
||||
"config": (
|
||||
json.dumps(d_cfg) if d_cfg
|
||||
else six.b(json.dumps(d_cfg))),
|
||||
"enum_deployments_status": consts.DeployStatus.DEPLOY_INIT,
|
||||
"credentials": six.b(json.dumps([]))
|
||||
} for d_uuid, d_cfg in self._7287df262dbc_deployments]
|
||||
)
|
||||
|
||||
conn.execute(
|
||||
task_table.insert(),
|
||||
[{
|
||||
"uuid": self._7287df262dbc_task_uuid,
|
||||
"created_at": timeutils.utcnow(),
|
||||
"updated_at": timeutils.utcnow(),
|
||||
"status": consts.TaskStatus.FINISHED,
|
||||
"validation_result": six.b(json.dumps({})),
|
||||
"deployment_uuid": self._7287df262dbc_deployments[0][0]
|
||||
}]
|
||||
)
|
||||
|
||||
conn.execute(
|
||||
verifier_table.insert(),
|
||||
[{
|
||||
"uuid": self._7287df262dbc_verifier_uuid,
|
||||
"name": str(uuid.uuid4()),
|
||||
"type": str(uuid.uuid4()),
|
||||
"created_at": timeutils.utcnow(),
|
||||
"updated_at": timeutils.utcnow(),
|
||||
"status": consts.VerifierStatus.INIT
|
||||
}]
|
||||
)
|
||||
|
||||
conn.execute(
|
||||
verification_table.insert(),
|
||||
[{
|
||||
"uuid": self._7287df262dbc_verification_uuid,
|
||||
"deployment_uuid": self._7287df262dbc_deployments[0][0],
|
||||
"verifier_uuid": self._7287df262dbc_verifier_uuid,
|
||||
"status": consts.VerificationStatus.INIT,
|
||||
"created_at": timeutils.utcnow(),
|
||||
"updated_at": timeutils.utcnow(),
|
||||
}]
|
||||
)
|
||||
|
||||
def _check_7287df262dbc(self, engine, data):
|
||||
env_table = db_utils.get_table(engine, "envs")
|
||||
platform_table = db_utils.get_table(engine, "platforms")
|
||||
task_table = db_utils.get_table(engine, "tasks")
|
||||
verifier_table = db_utils.get_table(engine, "verifiers")
|
||||
verification_table = db_utils.get_table(engine, "verifications")
|
||||
|
||||
with engine.connect() as conn:
|
||||
|
||||
task = conn.execute(task_table.select().where(
|
||||
task_table.c.uuid == self._7287df262dbc_task_uuid)).first()
|
||||
self.assertNotIn("deployment_uuid", task)
|
||||
self.assertIn("env_uuid", task)
|
||||
self.assertEqual(self._7287df262dbc_deployments[0][0],
|
||||
task["env_uuid"])
|
||||
conn.execute(
|
||||
task_table.delete().where(
|
||||
task_table.c.uuid == self._7287df262dbc_task_uuid))
|
||||
|
||||
v_id = self._7287df262dbc_verification_uuid
|
||||
verification = conn.execute(verification_table.select().where(
|
||||
verification_table.c.uuid == v_id)).first()
|
||||
self.assertNotIn("deployment_uuid", verification)
|
||||
self.assertIn("env_uuid", verification)
|
||||
self.assertEqual(self._7287df262dbc_deployments[0][0],
|
||||
verification["env_uuid"])
|
||||
conn.execute(
|
||||
verification_table.delete().where(
|
||||
verification_table.c.uuid == v_id))
|
||||
conn.execute(
|
||||
verifier_table.delete().where(
|
||||
verifier_table.c.uuid == self._7287df262dbc_verifier_uuid))
|
||||
|
||||
for d_uuid, d_cfg in self._7287df262dbc_deployments:
|
||||
env = conn.execute(env_table.select().where(
|
||||
env_table.c.uuid == d_uuid)).first()
|
||||
if d_cfg.get("creds", {}):
|
||||
# openstack deployment
|
||||
env_spec = json.loads(env["spec"])
|
||||
self.assertEqual({"existing@openstack"},
|
||||
set(env_spec.keys()))
|
||||
self.assertEqual(
|
||||
d_cfg["creds"]["openstack"],
|
||||
env_spec["existing@openstack"])
|
||||
|
||||
platforms = conn.execute(platform_table.select().where(
|
||||
platform_table.c.env_uuid == d_uuid)).fetchall()
|
||||
self.assertEqual(1, len(platforms))
|
||||
self.assertEqual("READY", platforms[0].status)
|
||||
self.assertEqual("existing@openstack",
|
||||
platforms[0].plugin_name)
|
||||
self.assertEqual(env_spec["existing@openstack"],
|
||||
json.loads(platforms[0].plugin_spec))
|
||||
self.assertEqual("openstack",
|
||||
platforms[0].platform_name)
|
||||
self.assertEqual(
|
||||
{"admin": {
|
||||
"username": "admin",
|
||||
"tenant_name": "demo",
|
||||
"password": "myadminpass",
|
||||
"region_name": "RegionOne",
|
||||
"https_insecure": False,
|
||||
"https_cacert": "",
|
||||
"endpoint_type": "public",
|
||||
"auth_url": "http://example.net:5000/v2.0/"},
|
||||
"users": []},
|
||||
json.loads(platforms[0].platform_data))
|
||||
|
||||
conn.execute(
|
||||
platform_table.delete().where(
|
||||
platform_table.c.env_uuid == d_uuid))
|
||||
else:
|
||||
if "creds" in d_cfg:
|
||||
# empty deployment
|
||||
self.assertEqual({}, json.loads(env["spec"]))
|
||||
else:
|
||||
# something
|
||||
self.assertEqual(d_cfg, json.loads(env["spec"]))
|
||||
|
||||
platforms = conn.execute(platform_table.select().where(
|
||||
platform_table.c.env_uuid == d_uuid)).fetchall()
|
||||
self.assertEqual(0, len(platforms))
|
||||
|
||||
conn.execute(
|
||||
env_table.delete().where(
|
||||
env_table.c.uuid == d_uuid))
|
||||
|
@ -21,6 +21,7 @@ import mock
|
||||
|
||||
from rally.common import objects
|
||||
from rally import consts
|
||||
from rally.env import env_mgr
|
||||
from rally import exceptions
|
||||
from tests.unit import test
|
||||
|
||||
@ -30,112 +31,37 @@ class DeploymentTestCase(test.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(DeploymentTestCase, self).setUp()
|
||||
self.deployment = {
|
||||
self.env = mock.MagicMock()
|
||||
self.env.data = {
|
||||
"id": 1,
|
||||
"uuid": "baa1bfb6-0c38-4f6c-9bd0-45968890e4f4",
|
||||
"created_at": None,
|
||||
"updated_at": None,
|
||||
"name": "",
|
||||
"config": {},
|
||||
"credentials": {},
|
||||
"status": consts.DeployStatus.DEPLOY_INIT,
|
||||
}
|
||||
self.resource = {
|
||||
"id": 42,
|
||||
"deployment_uuid": self.deployment["uuid"],
|
||||
"provider_name": "provider",
|
||||
"type": "some",
|
||||
"info": {"key": "value"},
|
||||
"description": "",
|
||||
"status": env_mgr.STATUS.INIT,
|
||||
"spec": {},
|
||||
"extras": {},
|
||||
"platforms": []
|
||||
}
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_create")
|
||||
def test_init_with_create(self, mock_deployment_create):
|
||||
mock_deployment_create.return_value = self.deployment
|
||||
@mock.patch("rally.common.objects.deploy.env_mgr.EnvManager.create")
|
||||
def test_init(self, mock_env_manager_create):
|
||||
objects.Deployment(mock.MagicMock(data={"platforms": [], "spec": {}}))
|
||||
self.assertFalse(mock_env_manager_create.called)
|
||||
deploy = objects.Deployment()
|
||||
mock_deployment_create.assert_called_once_with({})
|
||||
self.assertEqual(self.deployment["uuid"], deploy["uuid"])
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_create")
|
||||
def test_init_without_create(self, mock_deployment_create):
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
self.assertFalse(mock_deployment_create.called)
|
||||
self.assertEqual(self.deployment["uuid"], deploy["uuid"])
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_get")
|
||||
def test_get(self, mock_deployment_get):
|
||||
mock_deployment_get.return_value = self.deployment
|
||||
deploy = objects.Deployment.get(self.deployment["uuid"])
|
||||
mock_deployment_get.assert_called_once_with(self.deployment["uuid"])
|
||||
self.assertEqual(self.deployment["uuid"], deploy["uuid"])
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_delete")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_create")
|
||||
def test_create_and_delete(self, mock_deployment_create,
|
||||
mock_deployment_delete):
|
||||
mock_deployment_create.return_value = self.deployment
|
||||
deploy = objects.Deployment()
|
||||
deploy.delete()
|
||||
mock_deployment_delete.assert_called_once_with(self.deployment["uuid"])
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_delete")
|
||||
def test_delete_by_uuid(self, mock_deployment_delete):
|
||||
objects.Deployment.delete_by_uuid(self.deployment["uuid"])
|
||||
mock_deployment_delete.assert_called_once_with(self.deployment["uuid"])
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_create")
|
||||
def test_update(self, mock_deployment_create, mock_deployment_update):
|
||||
mock_deployment_create.return_value = self.deployment
|
||||
mock_deployment_update.return_value = {"opt": "val2"}
|
||||
deploy = objects.Deployment(opt="val1")
|
||||
deploy._update({"opt": "val2"})
|
||||
mock_deployment_update.assert_called_once_with(
|
||||
self.deployment["uuid"], {"opt": "val2"})
|
||||
self.assertEqual("val2", deploy["opt"])
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
def test_update_status(self, mock_deployment_update):
|
||||
mock_deployment_update.return_value = self.deployment
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
deploy.update_status(consts.DeployStatus.DEPLOY_FAILED)
|
||||
mock_deployment_update.assert_called_once_with(
|
||||
self.deployment["uuid"],
|
||||
{"status": consts.DeployStatus.DEPLOY_FAILED},
|
||||
mock_env_manager_create.assert_called_once_with(
|
||||
name=None, description="", spec={}, extras={}
|
||||
)
|
||||
self.assertEqual(mock_env_manager_create.return_value.uuid,
|
||||
deploy["uuid"])
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
def test_update_name(self, mock_deployment_update):
|
||||
mock_deployment_update.return_value = self.deployment
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
deploy.update_name("new_name")
|
||||
mock_deployment_update.assert_called_once_with(
|
||||
self.deployment["uuid"],
|
||||
{"name": "new_name"},
|
||||
)
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
def test_update_config(self, mock_deployment_update):
|
||||
mock_deployment_update.return_value = self.deployment
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
deploy.update_config({"opt": "val"})
|
||||
mock_deployment_update.assert_called_once_with(
|
||||
self.deployment["uuid"],
|
||||
{"config": {"opt": "val"}},
|
||||
)
|
||||
|
||||
@mock.patch("rally.deployment.credential.get")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
def test_update_credentials(self, mock_deployment_update,
|
||||
mock_credential_get):
|
||||
mock_deployment_update.return_value = self.deployment
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
credentials = {"foo": [{"admin": {"fake_admin": True},
|
||||
"users": [{"fake_user": True}]}]}
|
||||
|
||||
deploy.update_credentials(credentials)
|
||||
mock_deployment_update.assert_called_once_with(
|
||||
self.deployment["uuid"],
|
||||
{
|
||||
"credentials": {"foo": [{"admin": {"fake_admin": True},
|
||||
"users": [{"fake_user": True}]}]}
|
||||
})
|
||||
@mock.patch("rally.common.objects.deploy.env_mgr.EnvManager.get")
|
||||
def test_get(self, mock_env_manager_get):
|
||||
mock_env_manager_get.return_value = self.env
|
||||
deploy = objects.Deployment.get(self.env.data["uuid"])
|
||||
mock_env_manager_get.assert_called_once_with(self.env.data["uuid"])
|
||||
self.assertEqual(self.env.uuid, deploy["uuid"])
|
||||
|
||||
@mock.patch("rally.deployment.credential.get")
|
||||
def test_get_validation_context(self, mock_credential_get):
|
||||
@ -143,9 +69,9 @@ class DeploymentTestCase(test.TestCase):
|
||||
credential_cls.get_validation_context.side_effect = [
|
||||
{"foo_test": "test"}, {"boo_test": "boo"}
|
||||
]
|
||||
credentials = {"foo": [], "boo": []}
|
||||
self.deployment["credentials"] = credentials
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
deploy = objects.Deployment(deployment=self.env)
|
||||
deploy._all_credentials = {"foo": [], "boo": []}
|
||||
|
||||
self.assertEqual({"foo_test": "test", "boo_test": "boo"},
|
||||
deploy.get_validation_context())
|
||||
|
||||
@ -153,36 +79,40 @@ class DeploymentTestCase(test.TestCase):
|
||||
[mock.call("foo"), mock.call("boo")],
|
||||
any_order=True)
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.Deployment.get_all_credentials")
|
||||
def test_verify_connections(self, mock_get_all_credentials):
|
||||
def test_verify_connections(self):
|
||||
deploy = objects.Deployment(deployment=self.env)
|
||||
|
||||
creds = [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]
|
||||
mock_get_all_credentials.return_value = {
|
||||
"foo": [{"admin": creds[0], "users": [creds[1], creds[2]]}]
|
||||
}
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
self.env.check_health.return_value = {"foo": {"available": True}}
|
||||
deploy.verify_connections()
|
||||
for c in creds:
|
||||
c.verify_connection.assert_called_once_with()
|
||||
self.env.check_health.assert_called_once_with()
|
||||
|
||||
self.env.check_health.return_value = {"foo": {"available": False,
|
||||
"message": "Ooops"}}
|
||||
e = self.assertRaises(exceptions.RallyException,
|
||||
deploy.verify_connections)
|
||||
self.assertEqual("Platform foo is not available: Ooops.", "%s" % e)
|
||||
|
||||
def test_get_platforms(self):
|
||||
self.deployment["credentials"] = {"foo": {"admin": None, "users": []},
|
||||
"bar": {"admin": None, "users": []}}
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
self.assertEqual({"foo", "bar"}, set(deploy.get_platforms()))
|
||||
|
||||
def test_get_platforms_empty(self):
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
deploy = objects.Deployment(deployment=self.env)
|
||||
self.assertEqual([], list(deploy.get_platforms()))
|
||||
|
||||
self.env.data["platforms"] = [
|
||||
{"plugin_name": "existing@openstack", "platform_data": {}},
|
||||
{"plugin_name": "foo", "platform_data": {}}
|
||||
]
|
||||
|
||||
deploy = objects.Deployment(deployment=self.env)
|
||||
|
||||
self.assertEqual({"openstack", "foo"}, set(deploy.get_platforms()))
|
||||
|
||||
@mock.patch("rally.deployment.credential.get")
|
||||
def test_get_credentials_for(self, mock_credential_get):
|
||||
credential_cls = mock_credential_get.return_value
|
||||
credential_inst = credential_cls.return_value
|
||||
credentials = {"foo": [{"admin": {"fake_admin": True},
|
||||
"users": [{"fake_user": True}]}]}
|
||||
self.deployment["credentials"] = credentials
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
deploy = objects.Deployment(deployment=self.env)
|
||||
deploy._all_credentials = {"foo": [{"admin": {"fake_admin": True},
|
||||
"users": [{"fake_user": True}]}]}
|
||||
|
||||
creds = deploy.get_credentials_for("foo")
|
||||
|
||||
mock_credential_get.assert_called_once_with("foo")
|
||||
@ -195,7 +125,7 @@ class DeploymentTestCase(test.TestCase):
|
||||
"users": [credential_inst]}, creds)
|
||||
|
||||
def test_get_credentials_for_default(self):
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
deploy = objects.Deployment(deployment=self.env)
|
||||
creds = deploy.get_credentials_for("default")
|
||||
self.assertEqual({"admin": None, "users": []}, creds)
|
||||
|
||||
@ -218,7 +148,8 @@ class DeploymentTestCase(test.TestCase):
|
||||
|
||||
mock_credential.get.side_effect = credential_get
|
||||
|
||||
self.deployment["credentials"] = collections.OrderedDict([
|
||||
deploy = objects.Deployment(deployment=self.env)
|
||||
deploy._all_credentials = collections.OrderedDict([
|
||||
# the case when both admin and users are specified
|
||||
("openstack", [{"admin": openstack_admin,
|
||||
"users": [openstack_user_1, openstack_user_2]}]),
|
||||
@ -227,7 +158,6 @@ class DeploymentTestCase(test.TestCase):
|
||||
# the case when only users are specified
|
||||
("bar", [{"admin": None, "users": [bar_user_1]}])])
|
||||
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
self.assertEqual({"openstack": [
|
||||
{"admin": openstack_cred.return_value,
|
||||
"users": [openstack_cred.return_value,
|
||||
@ -236,14 +166,17 @@ class DeploymentTestCase(test.TestCase):
|
||||
"bar": [{"admin": None, "users": [bar_cred.return_value]}]
|
||||
}, deploy.get_all_credentials())
|
||||
|
||||
self.assertEqual([mock.call(**openstack_admin),
|
||||
self.assertEqual([mock.call(permission=consts.EndpointPermission.ADMIN,
|
||||
**openstack_admin),
|
||||
mock.call(**openstack_user_1),
|
||||
mock.call(**openstack_user_2)],
|
||||
openstack_cred.call_args_list)
|
||||
foo_cred.assert_called_once_with(**foo_admin)
|
||||
foo_cred.assert_called_once_with(
|
||||
permission=consts.EndpointPermission.ADMIN,
|
||||
**foo_admin)
|
||||
bar_cred.assert_called_once_with(**bar_user_1)
|
||||
self.assertEqual([mock.call(p)
|
||||
for p in self.deployment["credentials"].keys()],
|
||||
for p in deploy._all_credentials.keys()],
|
||||
mock_credential.get.call_args_list)
|
||||
|
||||
@mock.patch("rally.deployment.credential.get")
|
||||
@ -251,126 +184,113 @@ class DeploymentTestCase(test.TestCase):
|
||||
credential_cls = mock_credential_get.return_value
|
||||
credential_inst = credential_cls.return_value
|
||||
|
||||
credentials = {"openstack": [{"admin": {"fake_admin": True},
|
||||
"users": [{"fake_user": True}]}]}
|
||||
self.deployment["credentials"] = credentials
|
||||
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
deploy = objects.Deployment(deployment=self.env)
|
||||
deploy._all_credentials = {
|
||||
"openstack": [{"admin": {"fake_admin": True},
|
||||
"users": [{"fake_user": True}]}]}
|
||||
|
||||
self.assertEqual(credential_inst, deploy["admin"])
|
||||
self.assertEqual([credential_inst], deploy["users"])
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
def test_update_empty_credentials(self, mock_deployment_update):
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
deploy.update_credentials({})
|
||||
mock_deployment_update.assert_called_once_with(
|
||||
self.deployment["uuid"], {"credentials": {}})
|
||||
|
||||
def test_get_credentials_error(self):
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
deploy = objects.Deployment(deployment=self.env)
|
||||
self.assertRaises(exceptions.RallyException,
|
||||
deploy.get_credentials_for, "bar")
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.resource_create")
|
||||
def test_add_resource(self, mock_resource_create):
|
||||
mock_resource_create.return_value = self.resource
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
resource = deploy.add_resource("provider", type="some",
|
||||
info={"key": "value"})
|
||||
self.assertEqual(self.resource["id"], resource["id"])
|
||||
mock_resource_create.assert_called_once_with({
|
||||
"deployment_uuid": self.deployment["uuid"],
|
||||
"provider_name": "provider",
|
||||
"type": "some",
|
||||
"info": {"key": "value"},
|
||||
})
|
||||
|
||||
@mock.patch("rally.common.objects.task.db.resource_delete")
|
||||
def test_delete(self, mock_resource_delete):
|
||||
objects.Deployment.delete_resource(42)
|
||||
mock_resource_delete.assert_called_once_with(42)
|
||||
|
||||
@mock.patch("rally.common.objects.task.db.resource_get_all")
|
||||
def test_get_resources(self, mock_resource_get_all):
|
||||
mock_resource_get_all.return_value = [self.resource]
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
resources = deploy.get_resources(provider_name="provider", type="some")
|
||||
self.assertEqual(1, len(resources))
|
||||
self.assertEqual(self.resource["id"], resources[0]["id"])
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.dt.datetime")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
def test_update_set_started(self, mock_deployment_update, mock_datetime):
|
||||
mock_datetime.now = mock.Mock(return_value="fake_time")
|
||||
mock_deployment_update.return_value = self.deployment
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
deploy.set_started()
|
||||
mock_deployment_update.assert_called_once_with(
|
||||
self.deployment["uuid"],
|
||||
{"started_at": "fake_time",
|
||||
"status": consts.DeployStatus.DEPLOY_STARTED}
|
||||
)
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.dt.datetime")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
def test_update_set_completed(self, mock_deployment_update, mock_datetime):
|
||||
mock_datetime.now = mock.Mock(return_value="fake_time")
|
||||
mock_deployment_update.return_value = self.deployment
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
deploy.set_completed()
|
||||
mock_deployment_update.assert_called_once_with(
|
||||
self.deployment["uuid"],
|
||||
{"completed_at": "fake_time",
|
||||
"status": consts.DeployStatus.DEPLOY_FINISHED}
|
||||
)
|
||||
|
||||
def test_to_dict(self):
|
||||
self.deployment = {
|
||||
"status": "deploy->finished",
|
||||
"parent_uuid": None,
|
||||
"updated_at": dt.datetime(2017, 3, 10, 9, 5, 9, 117427),
|
||||
"completed_at": dt.datetime(2017, 3, 10, 12, 5, 9, 94981),
|
||||
"credentials":
|
||||
{"openstack":
|
||||
[{"admin":
|
||||
{"username": "foo_admin_name",
|
||||
"endpoint": None,
|
||||
"region_name": "FooRegionOne",
|
||||
"https_insecure": False,
|
||||
"permission": "foo_perm",
|
||||
"tenant_name": "foo_tenant",
|
||||
"user_domain_name": "Default",
|
||||
"https_cacert": "",
|
||||
"domain_name": None,
|
||||
"endpoint_type": None,
|
||||
"auth_url": "foo_auth_url",
|
||||
"password": "admin",
|
||||
"project_domain_name": "Default"},
|
||||
"users": []}]},
|
||||
"started_at": dt.datetime(2017, 3, 10, 12, 5, 9, 78779),
|
||||
"id": 1,
|
||||
"name": "foo_deployment_name",
|
||||
"uuid": "eeecf2c6-8b5d-4ed7-92e5-b7cdc335e885",
|
||||
"created_at": dt.datetime(2017, 3, 10, 9, 5, 9, 68652),
|
||||
"config": {
|
||||
"endpoint": None,
|
||||
"region_name": "FooRegionOne",
|
||||
"https_insecure": False,
|
||||
"admin": {
|
||||
"username": "foo_admin_name",
|
||||
"password": "foo_admin_pwd",
|
||||
"user_domain_name": "Default",
|
||||
"project_name": "foo_prj_name",
|
||||
"project_domain_name": "Default"},
|
||||
"https_cacert": "",
|
||||
"endpoint_type": None,
|
||||
"auth_url": "foo_auth_url",
|
||||
"type": "ExistingCloud"}}
|
||||
deploy = objects.Deployment(deployment=self.deployment)
|
||||
expected_result = deploy.to_dict()
|
||||
for field in ["created_at", "completed_at",
|
||||
"started_at", "updated_at"]:
|
||||
self.deployment[field] = self.deployment[field].strftime(
|
||||
self.TIME_FORMAT)
|
||||
self.assertEqual(expected_result, self.deployment)
|
||||
env = mock.Mock(
|
||||
status=env_mgr.STATUS.READY,
|
||||
data={
|
||||
"created_at": dt.datetime(2017, 3, 10, 9, 5, 9, 68652),
|
||||
"updated_at": dt.datetime(2017, 3, 10, 9, 5, 10, 117427),
|
||||
"id": 1,
|
||||
"name": "foo_env_name",
|
||||
"uuid": "eeecf2c6-8b5d-4ed7-92e5-b7cdc335e885",
|
||||
"platforms": [],
|
||||
"spec": {
|
||||
"existing@openstack": {
|
||||
"endpoint": None,
|
||||
"region_name": "FooRegionOne",
|
||||
"https_insecure": False,
|
||||
"admin": {
|
||||
"username": "foo_admin_name",
|
||||
"password": "foo_admin_pwd",
|
||||
"user_domain_name": "Default",
|
||||
"project_name": "foo_prj_name",
|
||||
"project_domain_name": "Default"},
|
||||
"https_cacert": "",
|
||||
"endpoint_type": None,
|
||||
"auth_url": "foo_auth_url"}
|
||||
}})
|
||||
deploy = objects.Deployment(deployment=env)
|
||||
config = {"openstack": env.data["spec"]["existing@openstack"]}
|
||||
self.assertEqual(
|
||||
{
|
||||
"created_at": "2017-03-10T09:05:09",
|
||||
"started_at": "2017-03-10T09:05:09",
|
||||
"updated_at": "2017-03-10T09:05:10",
|
||||
"completed_at": "n/a",
|
||||
"id": 1,
|
||||
"uuid": "eeecf2c6-8b5d-4ed7-92e5-b7cdc335e885",
|
||||
"name": "foo_env_name",
|
||||
"parent_uuid": None,
|
||||
"status": "deploy->finished",
|
||||
"config": config,
|
||||
"credentials": {}},
|
||||
deploy.to_dict())
|
||||
|
||||
def test_getitem(self):
|
||||
|
||||
class FakeEnvManager(object):
|
||||
@property
|
||||
def status(self):
|
||||
return env_mgr.STATUS.READY
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
return {
|
||||
"created_at": dt.datetime(2017, 3, 10, 9, 5, 9, 68652),
|
||||
"updated_at": dt.datetime(2017, 3, 10, 9, 5, 10, 117427),
|
||||
"id": 1,
|
||||
"name": "foo_env_name",
|
||||
"uuid": "eeecf2c6-8b5d-4ed7-92e5-b7cdc335e885",
|
||||
"platforms": [],
|
||||
"extras": {"foo": "bar"},
|
||||
"spec": {
|
||||
"existing@openstack": {
|
||||
"endpoint": None,
|
||||
"region_name": "FooRegionOne",
|
||||
"https_insecure": False,
|
||||
"admin": {
|
||||
"username": "foo_admin_name",
|
||||
"password": "foo_admin_pwd",
|
||||
"user_domain_name": "Default",
|
||||
"project_name": "foo_prj_name",
|
||||
"project_domain_name": "Default"},
|
||||
"https_cacert": "",
|
||||
"endpoint_type": None,
|
||||
"auth_url": "foo_auth_url"}
|
||||
}
|
||||
}
|
||||
|
||||
deploy = objects.Deployment(deployment=FakeEnvManager())
|
||||
|
||||
self.assertEqual("deploy->finished", deploy["status"])
|
||||
|
||||
self.assertEqual({"foo": "bar"}, deploy["extra"])
|
||||
self.assertEqual(
|
||||
{
|
||||
"openstack": {
|
||||
"admin": {
|
||||
"password": "foo_admin_pwd",
|
||||
"project_domain_name": "Default",
|
||||
"project_name": "foo_prj_name",
|
||||
"user_domain_name": "Default",
|
||||
"username": "foo_admin_name"},
|
||||
"auth_url": "foo_auth_url",
|
||||
"endpoint": None,
|
||||
"endpoint_type": None,
|
||||
"https_cacert": "",
|
||||
"https_insecure": False,
|
||||
"region_name": "FooRegionOne"}},
|
||||
deploy["config"])
|
||||
|
@ -171,22 +171,21 @@ class TaskTestCase(test.TestCase):
"validation_result": {"a": "fake"}}
)

@mock.patch("rally.common.objects.task.db.deployment_get")
def test_to_dict(self, mock_deployment_get):
@mock.patch("rally.common.objects.task.db.env_get")
def test_to_dict(self, mock_env_get):
workloads = [{"created_at": dt.datetime.now(),
"updated_at": dt.datetime.now()}]
self.task.update({"deployment_uuid": "deployment_uuid",
"deployment_name": "deployment_name",
self.task.update({"env_uuid": "deployment_uuid",
"deployment_uuid": "deployment_uuid",
"created_at": dt.datetime.now(),
"updated_at": dt.datetime.now()})

mock_deployment_get.return_value = {"name": "deployment_name"}
mock_env_get.return_value = {"name": "deployment_name"}

task = objects.Task(task=self.task)
serialized_task = task.to_dict()

mock_deployment_get.assert_called_once_with(
self.task["deployment_uuid"])
mock_env_get.assert_called_once_with(self.task["env_uuid"])
self.assertEqual(self.task, serialized_task)

self.task["subtasks"] = [{"workloads": workloads}]
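The hunk above moves Task serialization from the deployments table to the envs table while keeping the old field names in the output. A hedged sketch of that aliasing (the helper below and its signature are illustrative, not Rally's actual code; only the env_uuid / deployment_uuid / deployment_name relationship is taken from the test):

def serialize_task(task_row, env_get):
    """Build the legacy task dict from an env-backed task row (sketch)."""
    result = dict(task_row)
    # the row stores env_uuid; old consumers still expect deployment_uuid
    # and deployment_name, so alias them from the env record
    env = env_get(task_row["env_uuid"])
    result.setdefault("deployment_uuid", task_row["env_uuid"])
    result["deployment_name"] = env["name"]
    return result

Under that scheme the assertion mock_env_get.assert_called_once_with(self.task["env_uuid"]) holds: the env record is fetched only to recover the human-readable name.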
@ -13,6 +13,7 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import datetime as dt
|
||||
|
||||
import mock
|
||||
@ -26,7 +27,8 @@ class VerificationTestCase(test.TestCase):
|
||||
def setUp(self):
|
||||
super(VerificationTestCase, self).setUp()
|
||||
|
||||
self.db_obj = {"uuid": "uuid-1"}
|
||||
self.db_obj = {"uuid": "uuid-1",
|
||||
"env_uuid": "e_uuid"}
|
||||
self._db_entry = {}
|
||||
|
||||
@mock.patch("rally.common.objects.verification.db.verification_create")
|
||||
@ -41,7 +43,7 @@ class VerificationTestCase(test.TestCase):
|
||||
data = {"created_at": dt.date(2017, 2, 3),
|
||||
"updated_at": dt.date(2017, 3, 3),
|
||||
"id": "v_id",
|
||||
"deployment_uuid": "d_uuid",
|
||||
"env_uuid": "d_uuid",
|
||||
"uuid": "v_uuid",
|
||||
"verifier_uuid": "v_uuid",
|
||||
"unexpected_success": "2",
|
||||
@ -56,11 +58,11 @@ class VerificationTestCase(test.TestCase):
|
||||
"expected_failures": 2,
|
||||
"tests_count": 3,
|
||||
"failures": 2}
|
||||
verification = objects.Verification("verification_id")
|
||||
verification._db_entry = data
|
||||
result = objects.Verification.to_dict(verification)
|
||||
verification = objects.Verification(copy.deepcopy(data))
|
||||
result = verification.to_dict()
|
||||
data["created_at"] = data["created_at"].strftime(TIME_FORMAT)
|
||||
data["updated_at"] = data["updated_at"].strftime(TIME_FORMAT)
|
||||
data["deployment_uuid"] = data["env_uuid"]
|
||||
self.assertEqual(data, result)
|
||||
|
||||
@mock.patch("rally.common.objects.verification.db.verification_create")
|
||||
|
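Note the compatibility shim exercised above: the verification record now stores env_uuid, and to_dict() mirrors it back as deployment_uuid so old consumers keep working. Roughly (a sketch; TIME_FORMAT and the field names are taken from the test, the function itself is illustrative):

def verification_to_dict(db_entry):
    """Serialize a verification row, keeping the legacy alias (sketch)."""
    result = dict(db_entry)
    result["created_at"] = result["created_at"].strftime(TIME_FORMAT)
    result["updated_at"] = result["updated_at"].strftime(TIME_FORMAT)
    result["deployment_uuid"] = result["env_uuid"]  # legacy alias
    return result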
@ -77,13 +77,12 @@ class VerifierTestCase(test.TestCase):
mock_verifier_update.assert_called_once_with(self.db_obj["uuid"],
status="some-status")

@mock.patch("rally.common.objects.verifier.db.deployment_get")
def test_deployment_property(self, mock_deployment_get):
@mock.patch("rally.env.env_mgr.EnvManager.get")
def test_deployment_property(self, mock_env_manager_get):
v = objects.Verifier(self.db_obj)
mock_deployment_get.return_value = {"name": "foo", "uuid": "bar"}
v.set_deployment("some-deployment")
self.assertEqual("foo", v.deployment["name"])
self.assertEqual("bar", v.deployment["uuid"])
self.assertEqual(mock_env_manager_get.return_value, v.deployment._env)
mock_env_manager_get.assert_called_once_with("some-deployment")

def test_deployment_property_raise_exc(self):
v = objects.Verifier(self.db_obj)
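A rough sketch of the property shape this test implies: set_deployment() remembers an identifier and deployment lazily resolves it through EnvManager.get, wrapping the result in the same kind of legacy facade sketched earlier (reusing the rally.env.env_mgr import and LegacyDeploymentView from that sketch). Apart from EnvManager.get and the _env attribute, which appear in the assertions, the names below are assumptions.

class VerifierSketch(object):
    """Illustrative only -- not rally.common.objects.Verifier."""

    def __init__(self, db_obj):
        self._db_obj = dict(db_obj)
        self._deployment = None

    def set_deployment(self, deployment):
        # remember which environment this verifier is attached to
        self._db_obj["env_uuid"] = deployment

    @property
    def deployment(self):
        if self._deployment is None:
            # resolved lazily through the new env manager
            env = env_mgr.EnvManager.get(self._db_obj["env_uuid"])
            self._deployment = LegacyDeploymentView(env)
        return self._deployment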
@ -138,6 +138,23 @@ class RallyContaxtAdapterTestCase(test.TestCase):
|
||||
self.assertTrue(args[0][0].startswith("[radapter.exception(Exception("
|
||||
"\"!2!\"))] Do not transmit"))
|
||||
|
||||
@mock.patch("rally.common.logging.getLogger")
|
||||
def test_error(self, mock_get_logger):
|
||||
radapter = rally_logging.RallyContextAdapter(mock.MagicMock(), {})
|
||||
radapter.log = mock.MagicMock()
|
||||
|
||||
radapter.error("foo", "bar")
|
||||
|
||||
# the number of the line which calls foo
|
||||
lineno = 146
|
||||
mock_get_logger.assert_called_once_with("%s:%s" % (__file__, lineno))
|
||||
|
||||
logger = mock_get_logger.return_value
|
||||
self.assertEqual(1, logger.warning.call_count)
|
||||
args = logger.warning.call_args_list[0]
|
||||
self.assertTrue(args[0][0].startswith("[radapter.error(\"foo\", "
|
||||
"\"bar\")] Do not use *args "))
|
||||
|
||||
|
||||
class ExceptionLoggerTestCase(test.TestCase):
|
||||
|
||||
|
@ -1,187 +0,0 @@
|
||||
# Copyright 2013: Mirantis Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Test ExistingCloud."""
|
||||
|
||||
import ddt
|
||||
import jsonschema
|
||||
|
||||
from rally import consts
|
||||
from rally.deployment import engine as deploy_engine
|
||||
from rally.deployment.engines import existing
|
||||
from tests.unit import test
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class TestExistingCloud(test.TestCase):
|
||||
def setUp(self):
|
||||
super(TestExistingCloud, self).setUp()
|
||||
self.deployments = {
|
||||
"v2.0": {
|
||||
"config": {
|
||||
"type": "ExistingCloud",
|
||||
"auth_url": "http://example.net:5000/v2.0/",
|
||||
"region_name": "RegionOne",
|
||||
"endpoint_type": consts.EndpointType.INTERNAL,
|
||||
"https_insecure": False,
|
||||
"https_cacert": "cacert",
|
||||
"profiler_hmac_key": None,
|
||||
"profiler_conn_str": None,
|
||||
"admin": {
|
||||
"username": "admin",
|
||||
"password": "myadminpass",
|
||||
"tenant_name": "demo"
|
||||
}
|
||||
}
|
||||
},
|
||||
"v3": {
|
||||
"config": {
|
||||
"type": "ExistingCloud",
|
||||
"auth_url": "http://example.net:5000/v3/",
|
||||
"region_name": "RegionOne",
|
||||
"endpoint_type": consts.EndpointType.INTERNAL,
|
||||
"https_insecure": False,
|
||||
"https_cacert": "cacert",
|
||||
"profiler_hmac_key": None,
|
||||
"profiler_conn_str": None,
|
||||
"admin": {
|
||||
"username": "admin",
|
||||
"password": "myadminpass",
|
||||
"domain_name": "domain",
|
||||
"project_name": "demo",
|
||||
"project_domain_name": "Default",
|
||||
"user_domain_name": "Default",
|
||||
}
|
||||
}
|
||||
},
|
||||
"abstract": {
|
||||
"config": {
|
||||
"type": "ExistingCloud",
|
||||
"creds": {
|
||||
"openstack": {
|
||||
"auth_url": "http://example.net:5000/v2.0/",
|
||||
"region_name": "RegionOne",
|
||||
"endpoint_type": consts.EndpointType.INTERNAL,
|
||||
"https_insecure": False,
|
||||
"https_cacert": "cacert",
|
||||
"profiler_hmac_key": None,
|
||||
"profiler_conn_str": None,
|
||||
"admin": {
|
||||
"username": "admin",
|
||||
"password": "myadminpass",
|
||||
"tenant_name": "demo"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@ddt.data("v2.0", "v3", "abstract")
|
||||
def test_init_and_valid_config(self, keystone_version):
|
||||
engine = existing.ExistingCloud(self.deployments[keystone_version])
|
||||
engine.validate()
|
||||
|
||||
@ddt.data("v2.0", "v3", "abstract")
|
||||
def test_invalid_config(self, keystone_version):
|
||||
deployment = self.deployments[keystone_version]
|
||||
deployment["config"]["admin"] = 42
|
||||
engine = existing.ExistingCloud(deployment)
|
||||
self.assertRaises(jsonschema.ValidationError,
|
||||
engine.validate)
|
||||
|
||||
@ddt.data("v2.0", "v3", "abstract")
|
||||
def test_additional_vars(self, keystone_version):
|
||||
deployment = self.deployments[keystone_version]
|
||||
deployment["extra"] = {}
|
||||
existing.ExistingCloud(deployment).validate()
|
||||
|
||||
deployment["extra"] = {"some_var": "some_value"}
|
||||
existing.ExistingCloud(deployment).validate()
|
||||
|
||||
deployment["extra"] = ["item1", "item2"]
|
||||
existing.ExistingCloud(deployment).validate()
|
||||
|
||||
@ddt.data("v2.0", "v3")
|
||||
def test_deploy(self, keystone_version):
|
||||
deployment = self.deployments[keystone_version]
|
||||
engine = existing.ExistingCloud(deployment)
|
||||
credentials = engine.deploy()
|
||||
credentials = credentials["openstack"][0]
|
||||
admin_credential = deployment["config"].copy()
|
||||
admin_credential.pop("type")
|
||||
admin_credential["endpoint"] = None
|
||||
admin_credential.update(admin_credential.pop("admin"))
|
||||
admin_credential["permission"] = consts.EndpointPermission.ADMIN
|
||||
|
||||
actual_credentials = credentials["admin"]
|
||||
|
||||
if keystone_version == "v3":
|
||||
# NOTE(andreykurilin): credentials obj uses `tenant_name` for both
|
||||
# keystone v2 and v3. It works perfectly for rally code (no
|
||||
# contradictions and misunderstandings ), but in case of checking
|
||||
# credentials.to_dict with data from database (where we use
|
||||
# project_name for keystone v3 config and tenant_name for
|
||||
# keystone v2), we need to transform vars.
|
||||
admin_credential["tenant_name"] = admin_credential.pop(
|
||||
"project_name")
|
||||
else:
|
||||
# NOTE(andreykurilin): there are no domain related variables in v2,
|
||||
# so we need to pop them from credentials.to_dict()
|
||||
actual_credentials.pop("domain_name")
|
||||
actual_credentials.pop("user_domain_name")
|
||||
actual_credentials.pop("project_domain_name")
|
||||
|
||||
self.assertEqual(admin_credential, actual_credentials)
|
||||
self.assertEqual([], credentials["users"])
|
||||
|
||||
def test_deploy_abstract(self):
|
||||
deployment = self.deployments["abstract"]
|
||||
engine = existing.ExistingCloud(deployment)
|
||||
credentials = engine.deploy()
|
||||
self.assertEqual(1, len(credentials))
|
||||
self.assertIn("openstack", credentials)
|
||||
self.assertEqual(1, len(credentials["openstack"]))
|
||||
credentials = credentials["openstack"][0]
|
||||
self.assertEqual([], credentials["users"])
|
||||
admin_credential = credentials["admin"]
|
||||
self.assertEqual({
|
||||
"auth_url": "http://example.net:5000/v2.0/",
|
||||
"domain_name": None,
|
||||
"endpoint": None,
|
||||
"endpoint_type": "internal",
|
||||
"https_cacert": "cacert",
|
||||
"https_insecure": False,
|
||||
"profiler_hmac_key": None,
|
||||
"profiler_conn_str": None,
|
||||
"password": "myadminpass",
|
||||
"permission": "admin",
|
||||
"project_domain_name": None,
|
||||
"region_name": "RegionOne",
|
||||
"tenant_name": "demo",
|
||||
"user_domain_name": None,
|
||||
"username": "admin"}, admin_credential)
|
||||
|
||||
@ddt.data("v2.0", "v3", "abstract")
|
||||
def test_cleanup(self, keystone_version):
|
||||
existing.ExistingCloud(self.deployments[keystone_version]).cleanup()
|
||||
|
||||
@ddt.data("v2.0", "v3", "abstract")
|
||||
def test_is_in_factory(self, keystone_version):
|
||||
name = self.deployments[keystone_version]["config"]["type"]
|
||||
engine = deploy_engine.Engine.get_engine(
|
||||
name, self.deployments[keystone_version])
|
||||
self.assertIsInstance(engine, existing.ExistingCloud)
|
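The deleted test_deploy above documented how the old engine turned an ExistingCloud config into an admin credential. A compact restatement of that transformation, derived from the assertions rather than new behaviour; the helper name is invented:

from rally import consts


def flatten_admin_credential(config):
    """Reproduce the mapping test_deploy asserted on (sketch)."""
    cred = dict(config)
    cred.pop("type", None)          # the engine type is not a credential field
    cred["endpoint"] = None         # the engine always reset the endpoint
    cred.update(cred.pop("admin"))  # admin fields move to the top level
    cred["permission"] = consts.EndpointPermission.ADMIN
    # credential objects use tenant_name for both keystone versions,
    # while v3 configs are written with project_name
    if "project_name" in cred:
        cred["tenant_name"] = cred.pop("project_name")
    return cred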
@ -13,8 +13,6 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import jsonschema
|
||||
|
||||
from rally.deployment import credential
|
||||
from tests.unit import test
|
||||
|
||||
@ -48,37 +46,3 @@ class CredentialTestCase(test.TestCase):
|
||||
cred.verify_connection()
|
||||
self.assertEqual({"bar": 42}, cred.to_dict())
|
||||
self.assertEqual({"foo": "foo-type"}, cred.list_services())
|
||||
|
||||
|
||||
@credential.configure_builder("foo")
|
||||
class FooCredentialBuilder(credential.CredentialBuilder):
|
||||
|
||||
CONFIG_SCHEMA = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"bar": {"type": "integer"}
|
||||
},
|
||||
"required": ["bar"],
|
||||
"additionalProperties": False
|
||||
}
|
||||
|
||||
def build_credentials(self):
|
||||
return {"admin": {"bar": self.config["bar"]}, "users": []}
|
||||
|
||||
|
||||
class CredentialBuilderTestCase(test.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(CredentialBuilderTestCase, self).setUp()
|
||||
self.cred_builder_cls = credential.get_builder("foo")
|
||||
|
||||
def test_configure_and_get(self):
|
||||
self.assertIs(FooCredentialBuilder, self.cred_builder_cls)
|
||||
|
||||
def test_validate(self):
|
||||
self.cred_builder_cls.validate({"bar": 42})
|
||||
|
||||
def test_validate_error(self):
|
||||
self.assertRaises(jsonschema.ValidationError,
|
||||
self.cred_builder_cls.validate,
|
||||
{"bar": "spam"})
|
||||
|
@ -1,215 +0,0 @@
|
||||
# Copyright 2013: Mirantis Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Test for deploy engines."""
|
||||
|
||||
import mock
|
||||
|
||||
from rally import consts
|
||||
from rally.deployment import engine
|
||||
from rally import exceptions
|
||||
from tests.unit import test
|
||||
|
||||
|
||||
def make_fake_deployment(**kwargs):
|
||||
values = dict({
|
||||
"uuid": "1359befb-8737-4f4e-bea9-492416106977",
|
||||
"config": {
|
||||
"name": "fake",
|
||||
},
|
||||
"status": consts.DeployStatus.DEPLOY_INIT,
|
||||
}, **kwargs)
|
||||
return FakeDeployment(values=values)
|
||||
|
||||
|
||||
class FakeDeployment(object):
|
||||
|
||||
def __init__(self, values=None):
|
||||
if values is None:
|
||||
values = {}
|
||||
self._values = values
|
||||
|
||||
def __getitem__(self, name):
|
||||
return self._values[name]
|
||||
|
||||
def update_status(self, status):
|
||||
self._values["status"] = status
|
||||
|
||||
def set_started(self):
|
||||
pass
|
||||
|
||||
def set_completed(self):
|
||||
pass
|
||||
|
||||
def delete(self):
|
||||
pass
|
||||
|
||||
|
||||
@engine.configure(name="FakeEngine")
|
||||
class FakeEngine(engine.Engine):
|
||||
"""Fake deployment engine.
|
||||
|
||||
Used for tests.
|
||||
"""
|
||||
deployed = False
|
||||
cleanuped = False
|
||||
|
||||
def __init__(self, deployment):
|
||||
super(FakeEngine, self).__init__(deployment)
|
||||
self.deployment = deployment
|
||||
|
||||
def deploy(self):
|
||||
self.deployed = True
|
||||
return self
|
||||
|
||||
def cleanup(self):
|
||||
self.cleanuped = True
|
||||
|
||||
|
||||
class EngineMixIn(object):
|
||||
def deploy(self):
|
||||
pass
|
||||
|
||||
def cleanup(self):
|
||||
pass
|
||||
|
||||
|
||||
class EngineTestCase(test.TestCase):
|
||||
|
||||
def test_get_engine_not_found(self):
|
||||
deployment = make_fake_deployment()
|
||||
self.assertRaises(exceptions.PluginNotFound,
|
||||
engine.Engine.get_engine,
|
||||
"non_existing_engine", deployment)
|
||||
self.assertEqual(consts.DeployStatus.DEPLOY_FAILED,
|
||||
deployment["status"])
|
||||
|
||||
def test_config(self):
|
||||
deployment = make_fake_deployment()
|
||||
engine = FakeEngine(deployment)
|
||||
self.assertEqual(deployment["config"], engine.config)
|
||||
|
||||
@mock.patch.object(FakeDeployment, "set_completed")
|
||||
@mock.patch.object(FakeDeployment, "set_started")
|
||||
def test_make_deploy(self, mock_fake_deployment_set_started,
|
||||
mock_fake_deployment_set_completed):
|
||||
deployment = make_fake_deployment()
|
||||
engine = FakeEngine(deployment)
|
||||
credential = engine.make_deploy()
|
||||
self.assertEqual(engine, credential)
|
||||
self.assertTrue(credential.deployed)
|
||||
self.assertFalse(credential.cleanuped)
|
||||
mock_fake_deployment_set_completed.assert_called_once_with()
|
||||
mock_fake_deployment_set_started.assert_called_once_with()
|
||||
|
||||
@mock.patch.object(FakeDeployment, "set_started")
|
||||
@mock.patch.object(FakeEngine, "deploy")
|
||||
def test_make_deploy_failed(self, mock_fake_engine_deploy,
|
||||
mock_fake_deployment_set_started):
|
||||
class DeployFailed(Exception):
|
||||
pass
|
||||
|
||||
deployment = make_fake_deployment()
|
||||
engine = FakeEngine(deployment)
|
||||
mock_fake_engine_deploy.side_effect = DeployFailed()
|
||||
self.assertRaises(DeployFailed, engine.make_deploy)
|
||||
mock_fake_deployment_set_started.assert_called_once_with()
|
||||
|
||||
@mock.patch.object(FakeDeployment, "update_status")
|
||||
def test_make_cleanup(self, mock_fake_deployment_update_status):
|
||||
deployment = make_fake_deployment()
|
||||
engine = FakeEngine(deployment)
|
||||
engine.make_cleanup()
|
||||
self.assertTrue(engine.cleanuped)
|
||||
self.assertFalse(engine.deployed)
|
||||
mock_fake_deployment_update_status.assert_has_calls([
|
||||
mock.call(consts.DeployStatus.CLEANUP_STARTED),
|
||||
mock.call(consts.DeployStatus.CLEANUP_FINISHED),
|
||||
])
|
||||
self.assertTrue(engine.cleanuped)
|
||||
|
||||
@mock.patch.object(FakeDeployment, "update_status")
|
||||
@mock.patch.object(FakeEngine, "cleanup")
|
||||
def test_make_cleanup_failed(self, mock_fake_engine_cleanup,
|
||||
mock_fake_deployment_update_status):
|
||||
class CleanUpFailed(Exception):
|
||||
pass
|
||||
|
||||
deployment = make_fake_deployment()
|
||||
engine = FakeEngine(deployment)
|
||||
mock_fake_engine_cleanup.side_effect = CleanUpFailed()
|
||||
self.assertRaises(CleanUpFailed, engine.make_cleanup)
|
||||
mock_fake_deployment_update_status.assert_has_calls([
|
||||
mock.call(consts.DeployStatus.CLEANUP_STARTED),
|
||||
])
|
||||
self.assertFalse(engine.cleanuped)
|
||||
|
||||
@mock.patch.object(FakeDeployment, "update_status")
|
||||
def test_with_statement(self, mock_fake_deployment_update_status):
|
||||
deployment = make_fake_deployment()
|
||||
engine = FakeEngine(deployment)
|
||||
with engine as deployer:
|
||||
self.assertEqual(engine, deployer)
|
||||
self.assertFalse(mock_fake_deployment_update_status.called)
|
||||
self.assertFalse(engine.cleanuped)
|
||||
self.assertFalse(engine.deployed)
|
||||
|
||||
def test_with_statement_failed_on_init(self):
|
||||
self._assert_changed_status_on_error(
|
||||
consts.DeployStatus.DEPLOY_INIT,
|
||||
consts.DeployStatus.DEPLOY_FAILED)
|
||||
|
||||
def test_with_statement_failed_on_started(self):
|
||||
self._assert_changed_status_on_error(
|
||||
consts.DeployStatus.DEPLOY_STARTED,
|
||||
consts.DeployStatus.DEPLOY_FAILED)
|
||||
|
||||
def test_with_statement_failed_on_finished(self):
|
||||
self._assert_changed_status_on_error(
|
||||
consts.DeployStatus.DEPLOY_FINISHED,
|
||||
consts.DeployStatus.DEPLOY_INCONSISTENT)
|
||||
|
||||
def test_with_statement_failed_on_cleanup(self):
|
||||
self._assert_changed_status_on_error(
|
||||
consts.DeployStatus.CLEANUP_STARTED,
|
||||
consts.DeployStatus.CLEANUP_FAILED)
|
||||
|
||||
@mock.patch.object(FakeDeployment, "update_status")
|
||||
def _assert_changed_status_on_error(self, initial, final,
|
||||
mock_fake_deployment_update_status):
|
||||
|
||||
class SomeError(Exception):
|
||||
pass
|
||||
|
||||
def context_with_error(manager):
|
||||
with mock.patch("traceback.print_exception"):
|
||||
with manager:
|
||||
raise SomeError()
|
||||
|
||||
deployment = make_fake_deployment(status=initial)
|
||||
engine = FakeEngine(deployment)
|
||||
self.assertRaises(SomeError, context_with_error, engine)
|
||||
mock_fake_deployment_update_status.assert_called_once_with(final)
|
||||
self.assertFalse(engine.cleanuped)
|
||||
self.assertFalse(engine.deployed)
|
||||
|
||||
def test_get_engine(self):
|
||||
deployment = make_fake_deployment()
|
||||
engine_inst = engine.Engine.get_engine("FakeEngine",
|
||||
deployment)
|
||||
self.assertIsInstance(engine_inst, FakeEngine)
|
||||
|
||||
def test_engine_factory_is_abstract(self):
|
||||
self.assertRaises(TypeError, engine.Engine)
|
14
tests/unit/env/test_env_mgr.py
vendored
@ -533,7 +533,7 @@ class EnvManagerTestCase(test.TestCase):
|
||||
)
|
||||
|
||||
@mock.patch("rally.env.env_mgr.EnvManager.cleanup")
|
||||
def test_destory_cleanup_failed(self, mock_env_manager_cleanup):
|
||||
def test_destroy_cleanup_failed(self, mock_env_manager_cleanup):
|
||||
mock_env_manager_cleanup.return_value = {
|
||||
"platform_1": {
|
||||
"errors": [],
|
||||
@ -580,12 +580,17 @@ class EnvManagerTestCase(test.TestCase):
|
||||
])
|
||||
mock__get_platforms.assert_called_once_with()
|
||||
|
||||
@mock.patch("rally.common.objects.Verifier.list")
|
||||
@mock.patch("rally.common.db.env_set_status")
|
||||
@mock.patch("rally.common.db.platform_set_status")
|
||||
@mock.patch("rally.env.env_mgr.EnvManager._get_platforms")
|
||||
def test_destory_with_platforms(self, mock__get_platforms,
|
||||
def test_destroy_with_platforms(self, mock__get_platforms,
|
||||
mock_platform_set_status,
|
||||
mock_env_set_status):
|
||||
mock_env_set_status,
|
||||
mock_verifier_list):
|
||||
verifier = mock.Mock()
|
||||
mock_verifier_list.return_value = [verifier]
|
||||
|
||||
platform1 = mock.MagicMock()
|
||||
platform1.get_fullname.return_value = "p_destroyed"
|
||||
platform1.status = platform.STATUS.DESTROYED
|
||||
@ -653,6 +658,9 @@ class EnvManagerTestCase(test.TestCase):
|
||||
platform.STATUS.FAILED_TO_DESTROY)
|
||||
])
|
||||
|
||||
verifier.set_deployment.assert_called_once_with(666)
|
||||
verifier.manager.uninstall.assert_called_once_with()
|
||||
|
||||
@mock.patch("rally.common.db.env_get_status")
|
||||
@mock.patch("rally.common.db.env_delete_cascade")
|
||||
def test_delete(self, mock_env_delete_cascade, mock_env_get_status):
|
||||
|
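The new assertions pin down one side effect of destroy(): besides marking platforms destroyed (or failed to destroy), every verifier is re-pointed at the environment and its manager is uninstalled. A rough sketch of that step — objects.Verifier.list, set_deployment and manager.uninstall come from the assertions; the rest, including the function name, is assumed:

def _uninstall_verifiers(env_id):
    """Drop verifier tooling tied to an env being destroyed (sketch)."""
    from rally.common import objects

    for verifier in objects.Verifier.list():
        # point the verifier at this env so uninstall acts on the right
        # working directory, then remove the installed tooling
        verifier.set_deployment(env_id)
        verifier.manager.uninstall()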
@ -13,7 +13,6 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import jsonschema
|
||||
import mock
|
||||
|
||||
from rally import consts
|
||||
@ -81,79 +80,3 @@ class OpenStackCredentialTestCase(test.TestCase):
|
||||
mock_clients.assert_called_once_with(
|
||||
self.credential, api_info="fake_info", cache={})
|
||||
self.assertIs(mock_clients.return_value, clients)
|
||||
|
||||
|
||||
class OpenStackCredentialBuilderTestCase(test.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(OpenStackCredentialBuilderTestCase, self).setUp()
|
||||
self.config = {
|
||||
"auth_url": "http://example.net:5000/v2.0/",
|
||||
"region_name": "RegionOne",
|
||||
"endpoint_type": consts.EndpointType.INTERNAL,
|
||||
"https_insecure": False,
|
||||
"https_cacert": "cacert",
|
||||
"admin": {
|
||||
"username": "admin",
|
||||
"password": "myadminpass",
|
||||
"tenant_name": "demo"
|
||||
},
|
||||
"users": [
|
||||
{
|
||||
"username": "user1",
|
||||
"password": "userpass",
|
||||
"tenant_name": "demo"
|
||||
}
|
||||
]
|
||||
}
|
||||
self.cred_builder_cls = credential.get_builder("openstack")
|
||||
|
||||
def test_validate(self):
|
||||
self.cred_builder_cls.validate(self.config)
|
||||
|
||||
def test_validate_error(self):
|
||||
self.assertRaises(jsonschema.ValidationError,
|
||||
self.cred_builder_cls.validate,
|
||||
{"foo": "bar"})
|
||||
|
||||
def test_build_credentials(self):
|
||||
creds_builder = self.cred_builder_cls(self.config)
|
||||
creds = creds_builder.build_credentials()
|
||||
self.assertEqual({
|
||||
"admin": {
|
||||
"auth_url": "http://example.net:5000/v2.0/",
|
||||
"username": "admin",
|
||||
"password": "myadminpass",
|
||||
"permission": consts.EndpointPermission.ADMIN,
|
||||
"domain_name": None,
|
||||
"endpoint": None,
|
||||
"endpoint_type": consts.EndpointType.INTERNAL,
|
||||
"https_cacert": "cacert",
|
||||
"https_insecure": False,
|
||||
"profiler_hmac_key": None,
|
||||
"profiler_conn_str": None,
|
||||
"project_domain_name": None,
|
||||
"region_name": "RegionOne",
|
||||
"tenant_name": "demo",
|
||||
"user_domain_name": None,
|
||||
},
|
||||
"users": [
|
||||
{
|
||||
"auth_url": "http://example.net:5000/v2.0/",
|
||||
"username": "user1",
|
||||
"password": "userpass",
|
||||
"permission": consts.EndpointPermission.USER,
|
||||
"domain_name": None,
|
||||
"endpoint": None,
|
||||
"endpoint_type": consts.EndpointType.INTERNAL,
|
||||
"https_cacert": "cacert",
|
||||
"https_insecure": False,
|
||||
"profiler_hmac_key": None,
|
||||
"profiler_conn_str": None,
|
||||
"project_domain_name": None,
|
||||
"region_name": "RegionOne",
|
||||
"tenant_name": "demo",
|
||||
"user_domain_name": None,
|
||||
}
|
||||
]
|
||||
}, creds)
|
||||
|
@ -19,7 +19,6 @@ import copy
|
||||
import os
|
||||
|
||||
import ddt
|
||||
import jsonschema
|
||||
import mock
|
||||
from oslo_config import cfg
|
||||
|
||||
@ -33,8 +32,7 @@ from tests.unit import test
|
||||
|
||||
FAKE_DEPLOYMENT_CONFIG = {
|
||||
# TODO(akscram): A fake engine is more suitable for that.
|
||||
"type": "ExistingCloud",
|
||||
"creds": {"openstack": {
|
||||
"openstack": {
|
||||
"auth_url": "http://example.net:5000/v2.0/",
|
||||
"admin": {
|
||||
"username": "admin",
|
||||
@ -47,7 +45,8 @@ FAKE_DEPLOYMENT_CONFIG = {
|
||||
"profiler_conn_str": None
|
||||
},
|
||||
"region_name": "RegionOne",
|
||||
"endpoint_type": consts.EndpointType.INTERNAL}}
|
||||
"endpoint_type": consts.EndpointType.INTERNAL
|
||||
}
|
||||
}
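The fixture change above is the config migration in miniature: the ExistingCloud wrapper disappears and only the per-platform credentials remain. Side by side, with values borrowed from the fixture and trimmed for brevity:

# old wrapper-style deployment config
old_config = {
    "type": "ExistingCloud",
    "creds": {
        "openstack": {
            "auth_url": "http://example.net:5000/v2.0/",
            "admin": {"username": "admin",
                      "password": "myadminpass",
                      "tenant_name": "demo"},
        },
    },
}

# new env-style config: just the platform credentials
new_config = {
    "openstack": {
        "auth_url": "http://example.net:5000/v2.0/",
        "admin": {"username": "admin",
                  "password": "myadminpass",
                  "tenant_name": "demo"},
    },
}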
|
||||
|
||||
|
||||
@ -90,7 +89,7 @@ class TaskAPITestCase(test.TestCase):
|
||||
mock_task_engine.return_value.validate.assert_called_once_with()
|
||||
|
||||
mock_task.assert_called_once_with(
|
||||
temporary=True, deployment_uuid=fake_deployment["uuid"])
|
||||
temporary=True, env_uuid=fake_deployment["uuid"])
|
||||
mock_deployment_get.assert_called_once_with(fake_deployment["uuid"])
|
||||
self.assertFalse(mock_task.get.called)
|
||||
|
||||
@ -248,7 +247,7 @@ class TaskAPITestCase(test.TestCase):
|
||||
self.task_inst.create(
|
||||
deployment=mock_deployment_get.return_value["uuid"], tags=tags)
|
||||
mock_task.assert_called_once_with(
|
||||
deployment_uuid=mock_deployment_get.return_value["uuid"],
|
||||
env_uuid=mock_deployment_get.return_value["uuid"],
|
||||
tags=tags)
|
||||
|
||||
@mock.patch("rally.common.objects.Deployment.get",
|
||||
@ -326,7 +325,7 @@ class TaskAPITestCase(test.TestCase):
|
||||
status=consts.DeployStatus.DEPLOY_INCONSISTENT,
|
||||
name="foo")
|
||||
mock_deployment_get.return_value = fake_deployment
|
||||
fake_task_dict = {"deployment_uuid": deployment_uuid,
|
||||
fake_task_dict = {"env_uuid": deployment_uuid,
|
||||
"uuid": "some_uuid"}
|
||||
fake_task = objects.Task(task=fake_task_dict)
|
||||
mock_task_get.return_value = fake_task
|
||||
@ -344,16 +343,13 @@ class TaskAPITestCase(test.TestCase):
|
||||
@mock.patch("rally.api.CONF", spec=cfg.CONF)
|
||||
def test_start_exception(self, mock_conf, mock_task_engine,
|
||||
mock_deployment_get, mock_task, mock_task_config):
|
||||
fake_deployment = fakes.FakeDeployment(
|
||||
mock_deployment_get.return_value = fakes.FakeDeployment(
|
||||
status=consts.DeployStatus.DEPLOY_FINISHED,
|
||||
name="foo", uuid="deployment_uuid")
|
||||
mock_deployment_get.return_value = fake_deployment
|
||||
mock_task.return_value.is_temporary = False
|
||||
mock_task_engine.return_value.run.side_effect = TypeError
|
||||
self.assertRaises(TypeError, self.task_inst.start,
|
||||
deployment="deployment_uuid", config="config")
|
||||
fake_deployment.update_status.assert_called_once_with(
|
||||
consts.DeployStatus.DEPLOY_INCONSISTENT)
|
||||
|
||||
@mock.patch("rally.api.objects.Task")
|
||||
@mock.patch("rally.api.objects.Deployment.get")
|
||||
@ -543,7 +539,7 @@ class TaskAPITestCase(test.TestCase):
|
||||
task_results=task_results)
|
||||
)
|
||||
|
||||
mock_task.assert_called_once_with(deployment_uuid="deployment_uuid",
|
||||
mock_task.assert_called_once_with(env_uuid="deployment_uuid",
|
||||
tags=None)
|
||||
mock_task.return_value.update_status.assert_has_calls(
|
||||
[mock.call(consts.TaskStatus.RUNNING),
|
||||
@ -615,7 +611,7 @@ class TaskAPITestCase(test.TestCase):
|
||||
task_results=task_results)
|
||||
)
|
||||
|
||||
mock_task.assert_called_once_with(deployment_uuid="deployment_uuid",
|
||||
mock_task.assert_called_once_with(env_uuid="deployment_uuid",
|
||||
tags=None)
|
||||
mock_task.return_value.update_status.assert_has_calls(
|
||||
[mock.call(consts.TaskStatus.RUNNING),
|
||||
@ -650,9 +646,8 @@ class TaskAPITestCase(test.TestCase):
|
||||
hooks_results=workload["hooks"], start_time=workload["start_time"])
|
||||
|
||||
@mock.patch("rally.api.objects.Deployment.get")
|
||||
@mock.patch("rally.api.jsonschema.validate", return_value=True)
|
||||
def test_import_results_with_inconsistent_deployment(
|
||||
self, mock_jsonschema_validate, mock_deployment_get):
|
||||
self, mock_deployment_get):
|
||||
fake_deployment = fakes.FakeDeployment(
|
||||
uuid="deployment_uuid", admin="fake_admin", users=["fake_user"],
|
||||
status=consts.DeployStatus.DEPLOY_INCONSISTENT,
|
||||
@ -667,9 +662,8 @@ class TaskAPITestCase(test.TestCase):
|
||||
|
||||
@mock.patch("rally.api.objects.Task")
|
||||
@mock.patch("rally.api.objects.Deployment.get")
|
||||
@mock.patch("rally.api.jsonschema.validate", return_value=True)
|
||||
def test_import_results_with_error_data(
|
||||
self, mock_jsonschema_validate, mock_deployment_get, mock_task):
|
||||
self, mock_deployment_get, mock_task):
|
||||
mock_deployment_get.return_value = fakes.FakeDeployment(
|
||||
uuid="deployment_uuid", admin="fake_admin", users=["fake_user"],
|
||||
status=consts.DeployStatus.DEPLOY_FINISHED)
|
||||
@ -694,8 +688,7 @@ class BaseDeploymentTestCase(test.TestCase):
|
||||
self.deployment_inst = api._Deployment(mock_api)
|
||||
self.deployment_config = copy.deepcopy(FAKE_DEPLOYMENT_CONFIG)
|
||||
self.deployment_uuid = "599bdf1d-fe77-461a-a810-d59b1490f4e3"
|
||||
creds = copy.deepcopy(FAKE_DEPLOYMENT_CONFIG)["creds"]
|
||||
admin_credential = creds["openstack"]
|
||||
admin_credential = copy.deepcopy(self.deployment_config["openstack"])
|
||||
admin_credential["endpoint"] = None
|
||||
admin_credential.update(admin_credential.pop("admin"))
|
||||
admin_credential["permission"] = consts.EndpointPermission.ADMIN
|
||||
@ -711,59 +704,59 @@ class BaseDeploymentTestCase(test.TestCase):
|
||||
|
||||
|
||||
class DeploymentAPITestCase(BaseDeploymentTestCase):
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_create")
|
||||
@mock.patch("rally.deployment.engines.existing.ExistingCloud.validate")
|
||||
def test_create(self, mock_existing_cloud_validate,
|
||||
mock_deployment_create, mock_deployment_update):
|
||||
mock_deployment_create.return_value = self.deployment
|
||||
mock_deployment_update.return_value = self.deployment
|
||||
@mock.patch("rally.api.objects.Deployment")
|
||||
def test_create(self, mock_deployment):
|
||||
dep = self.deployment_inst.create(config=self.deployment_config,
|
||||
name="fake_deployment")
|
||||
self.assertIsInstance(dep, dict)
|
||||
mock_deployment_create.assert_called_once_with({
|
||||
"name": "fake_deployment",
|
||||
"config": self.deployment_config,
|
||||
})
|
||||
mock_existing_cloud_validate.assert_called_once_with()
|
||||
mock_deployment_update.assert_has_calls([
|
||||
mock.call(self.deployment_uuid,
|
||||
{"credentials": {"openstack": [self.credentials]}})
|
||||
])
|
||||
self.assertEqual(mock_deployment.return_value.to_dict.return_value,
|
||||
dep)
|
||||
mock_deployment.assert_called_once_with(
|
||||
name="fake_deployment",
|
||||
config=self.deployment_config,
|
||||
extras={})
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_create")
|
||||
@mock.patch("rally.deployment.engines.existing.ExistingCloud.validate",
|
||||
side_effect=jsonschema.ValidationError("ValidationError"))
|
||||
def test_create_validation_error(
|
||||
self, mock_existing_cloud_validate, mock_deployment_create,
|
||||
mock_deployment_update):
|
||||
mock_deployment_create.return_value = self.deployment
|
||||
self.assertRaises(jsonschema.ValidationError,
|
||||
self.deployment_inst.create,
|
||||
config=self.deployment_config,
|
||||
name="fake_deployment")
|
||||
mock_deployment_update.assert_called_once_with(
|
||||
self.deployment_uuid,
|
||||
{"status": consts.DeployStatus.DEPLOY_FAILED})
|
||||
@mock.patch("rally.api.objects.Deployment")
|
||||
def test_create_duplicate(self, mock_deployment):
|
||||
|
||||
@mock.patch("rally.api.LOG")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_create",
|
||||
side_effect=exceptions.DBRecordExists(
|
||||
field="name", value="fake_deploy", table="deployments"))
|
||||
def test_create_duplication_error(self, mock_deployment_create, mock_log):
|
||||
self.assertRaises(exceptions.DBRecordExists,
|
||||
self.deployment_inst.create,
|
||||
config=self.deployment_config,
|
||||
name="fake_deployment")
|
||||
exc = exceptions.DBRecordExists(
|
||||
field="name", value="fake_deployment", table="envs")
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_delete")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_get")
|
||||
def test_destroy(self, mock_deployment_get, mock_deployment_update,
|
||||
mock_deployment_delete):
|
||||
mock_deployment_get.return_value = self.deployment
|
||||
mock_deployment_update.return_value = self.deployment
|
||||
mock_deployment.side_effect = exc
|
||||
|
||||
a_exc = self.assertRaises(
|
||||
exceptions.DBRecordExists,
|
||||
self.deployment_inst.create, config=self.deployment_config,
|
||||
name="fake_deployment")
|
||||
self.assertEqual(exc, a_exc)
|
||||
|
||||
@mock.patch("rally.api.objects.Deployment")
|
||||
def test_create_with_old_cfg(self, mock_deployment):
|
||||
mock_deployment.return_value.spec = ""
|
||||
|
||||
config = {"type": "ExistingCloud",
|
||||
"creds": self.deployment_config}
|
||||
|
||||
dep = self.deployment_inst.create(config=config,
|
||||
name="fake_deployment")
|
||||
self.assertEqual(mock_deployment.return_value.to_dict.return_value,
|
||||
dep)
|
||||
mock_deployment.assert_called_once_with(
|
||||
name="fake_deployment",
|
||||
config=self.deployment_config,
|
||||
extras={})
|
||||
|
||||
config = {"type": "Something",
|
||||
"creds": self.deployment_config}
|
||||
|
||||
e = self.assertRaises(
|
||||
exceptions.RallyException,
|
||||
self.deployment_inst.create, config=config,
|
||||
name="fake_deployment")
|
||||
self.assertIn("You are using deployment type which doesn't exist.",
|
||||
"%s" % e)
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.env_mgr.EnvManager.get")
|
||||
def test_destroy(self, mock_env_manager_get):
|
||||
|
||||
list_verifiers = [{"name": "f1", "uuid": "1"},
|
||||
{"name": "f2", "uuid": "2"}]
|
||||
@ -771,133 +764,35 @@ class DeploymentAPITestCase(BaseDeploymentTestCase):
|
||||
|
||||
self.deployment_inst.destroy(deployment=self.deployment_uuid)
|
||||
|
||||
mock_deployment_get.assert_called_once_with(self.deployment_uuid)
|
||||
mock_deployment_delete.assert_called_once_with(self.deployment_uuid)
|
||||
self.deployment_inst.api.verifier.list.assert_called_once_with()
|
||||
self.assertEqual(
|
||||
[mock.call(verifier_id=m["name"],
|
||||
deployment_id=self.deployment["name"],
|
||||
force=True)
|
||||
for m in list_verifiers],
|
||||
self.deployment_inst.api.verifier.delete.call_args_list)
|
||||
mock_env_manager_get.assert_called_once_with(self.deployment_uuid)
|
||||
mock_env_manager_get.return_value.destroy.assert_called_once_with(
|
||||
skip_cleanup=True
|
||||
)
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_get")
|
||||
def test_recreate(self, mock_deployment_get, mock_deployment_update):
|
||||
mock_deployment_get.return_value = self.deployment
|
||||
mock_deployment_update.return_value = self.deployment
|
||||
self.deployment_inst.recreate(deployment=self.deployment_uuid)
|
||||
mock_deployment_get.assert_called_once_with(self.deployment_uuid)
|
||||
mock_deployment_update.assert_has_calls([
|
||||
mock.call(
|
||||
self.deployment_uuid,
|
||||
{"credentials":
|
||||
{"openstack": [{"admin": self.credentials["admin"],
|
||||
"users": self.credentials["users"]}]}})
|
||||
])
|
||||
def test_recreate(self):
|
||||
e = self.assertRaises(exceptions.RallyException,
|
||||
self.deployment_inst.recreate, deployment="")
|
||||
self.assertIn("Sorry, but recreate method", "%s" % e)
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_get")
|
||||
def test_recreate_config(self, mock_deployment_get,
|
||||
mock_deployment_update):
|
||||
mock_deployment_get.return_value = self.deployment
|
||||
mock_deployment_update.return_value = self.deployment
|
||||
config = copy.deepcopy(self.deployment_config)
|
||||
config["creds"]["openstack"]["admin"] = {
|
||||
"username": "admin",
|
||||
"password": "pass1",
|
||||
"tenant_name": "demo"}
|
||||
config["creds"]["openstack"]["users"] = [
|
||||
{"username": "user1",
|
||||
"password": "pass2",
|
||||
"tenant_name": "demo"}]
|
||||
|
||||
self.deployment_inst.recreate(deployment=self.deployment_uuid,
|
||||
config=config)
|
||||
mock_deployment_get.assert_called_once_with(self.deployment_uuid)
|
||||
mock_deployment_update.assert_has_calls([
|
||||
mock.call(self.deployment_uuid, {"config": config}),
|
||||
])
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_get")
|
||||
def test_recreate_old_config(self, mock_deployment_get,
|
||||
mock_deployment_update):
|
||||
mock_deployment_get.return_value = self.deployment
|
||||
mock_deployment_update.return_value = self.deployment
|
||||
config = copy.deepcopy(self.deployment_config["creds"])
|
||||
config["openstack"]["admin"] = {
|
||||
"username": "admin",
|
||||
"password": "pass1",
|
||||
"tenant_name": "demo"}
|
||||
config["openstack"]["users"] = [
|
||||
{"username": "user1",
|
||||
"password": "pass2",
|
||||
"tenant_name": "demo"}]
|
||||
|
||||
self.deployment_inst.recreate(deployment=self.deployment_uuid,
|
||||
config=config)
|
||||
mock_deployment_get.assert_called_once_with(self.deployment_uuid)
|
||||
mock_deployment_update.assert_has_calls([
|
||||
mock.call(self.deployment_uuid,
|
||||
{"config": {"type": "ExistingCloud", "creds": config}}),
|
||||
])
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_get")
|
||||
def test_recreate_config_invalid(self, mock_deployment_get,
|
||||
mock_deployment_update):
|
||||
mock_deployment_get.return_value = self.deployment
|
||||
mock_deployment_update.return_value = self.deployment
|
||||
config = copy.deepcopy(self.deployment_config)
|
||||
config["admin"] = {"foo": "bar"}
|
||||
|
||||
self.assertRaises(jsonschema.ValidationError,
|
||||
self.deployment_inst.recreate,
|
||||
deployment=self.deployment_uuid,
|
||||
config=config)
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_update")
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_get")
|
||||
def test_recreate_config_wrong_type(self, mock_deployment_get,
|
||||
mock_deployment_update):
|
||||
mock_deployment_get.return_value = self.deployment
|
||||
mock_deployment_update.return_value = self.deployment
|
||||
config = copy.deepcopy(self.deployment_config)
|
||||
config["type"] = "foo"
|
||||
|
||||
self.assertRaises(exceptions.RallyException,
|
||||
self.deployment_inst.recreate,
|
||||
deployment=self.deployment_uuid,
|
||||
config=config)
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_get")
|
||||
def test_get(self, mock_deployment_get):
|
||||
@mock.patch("rally.common.objects.deploy.env_mgr.EnvManager.get")
|
||||
def test_get(self, mock_env_manager_get):
|
||||
origin_config = copy.deepcopy(self.deployment_config)
|
||||
deployment_id = "aaaa-bbbb-cccc-dddd"
|
||||
mock_deployment_get.return_value = self.deployment
|
||||
ret = self.deployment_inst.get(deployment=deployment_id)
|
||||
mock_env_manager_get.return_value.data = {
|
||||
"spec": self.deployment_config,
|
||||
"platforms": [],
|
||||
"id": self.id(),
|
||||
"uuid": self.deployment["uuid"],
|
||||
"extras": {},
|
||||
"name": self.deployment["name"],
|
||||
"created_at": mock.Mock(),
|
||||
"updated_at": mock.Mock()}
|
||||
ret = self.deployment_inst.get(deployment=self.deployment["uuid"])
|
||||
for key in self.deployment:
|
||||
self.assertIn(key, ret)
|
||||
if key != "config":
|
||||
self.assertEqual(self.deployment[key], ret[key])
|
||||
self.assertEqual(origin_config["creds"], ret["config"])
|
||||
|
||||
@mock.patch("rally.common.objects.deploy.db.deployment_get")
|
||||
def test_get_deprecated_formats(self, mock_deployment_get):
|
||||
origin_config = copy.deepcopy(self.deployment_config)
|
||||
self.deployment_config.update(
|
||||
**self.deployment_config.pop("creds")["openstack"])
|
||||
deployment_id = "aaaa-bbbb-cccc-dddd"
|
||||
mock_deployment_get.return_value = self.deployment
|
||||
ret = self.deployment_inst.get(deployment=deployment_id)
|
||||
for key in self.deployment:
|
||||
self.assertIn(key, ret)
|
||||
if key != "config":
|
||||
self.assertEqual(self.deployment[key], ret[key])
|
||||
origin_config.pop("type")
|
||||
|
||||
self.assertEqual(origin_config["creds"], ret["config"])
|
||||
if key not in ("credentials", "config"):
|
||||
self.assertEqual(self.deployment[key], ret[key],
|
||||
"The key '%s' differs." % key)
|
||||
self.assertEqual(origin_config, ret["config"])
|
||||
|
||||
@mock.patch("rally.common.objects.Deployment.list")
|
||||
def test_list(self, mock_deployment_list):