Merge "[Sahara] Split Data Sources context"
commit 8347790441
@@ -25,9 +25,9 @@ from rally.task import context
 LOG = logging.getLogger(__name__)
 
 
-@context.configure(name="sahara_data_sources", order=443)
-class SaharaDataSources(context.Context):
-    """Context class for setting up Data Sources for an EDP job."""
+@context.configure(name="sahara_input_data_sources", order=443)
+class SaharaInputDataSources(context.Context):
+    """Context class for setting up Input Data Sources for an EDP job."""
 
     CONFIG_SCHEMA = {
         "type": "object",
@@ -39,30 +39,18 @@ class SaharaDataSources(context.Context):
             "input_url": {
                 "type": "string",
             },
-            "output_type": {
-                "enum": ["swift", "hdfs"],
-            },
-            "output_url_prefix": {
-                "type": "string",
-            }
         },
         "additionalProperties": False,
-        "required": ["input_type", "input_url",
-                     "output_type", "output_url_prefix"]
+        "required": ["input_type", "input_url"]
     }
 
     @logging.log_task_wrapper(LOG.info,
-                              _("Enter context: `Sahara Data Sources`"))
+                              _("Enter context: `Sahara Input Data Sources`"))
     def setup(self):
-        self.context["sahara_output_conf"] = {
-            "output_type": self.config["output_type"],
-            "output_url_prefix": self.config["output_url_prefix"]
-        }
         for user, tenant_id in rutils.iterate_per_tenants(
                 self.context["users"]):
             clients = osclients.Clients(user["endpoint"])
             sahara = clients.sahara()
 
             self.setup_inputs(sahara, tenant_id, self.config["input_type"],
                               self.config["input_url"])
@@ -79,7 +67,8 @@ class SaharaDataSources(context.Context):
 
         self.context["tenants"][tenant_id]["sahara_input"] = input_ds.id
 
-    @logging.log_task_wrapper(LOG.info, _("Exit context: `Sahara EDP`"))
+    @logging.log_task_wrapper(LOG.info, _("Exit context: `Sahara Input Data"
+                                          "Sources`"))
     def cleanup(self):
         resources = ["job_executions", "jobs", "data_sources"]
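The setup_inputs() helper called above is not shown in this hunk. Judging from the context line that stores input_ds.id per tenant and from the unit test further down (which mocks data_sources.create), a minimal sketch of what it presumably looks like follows; the keyword arguments passed to data_sources.create() are an assumption, not confirmed by this diff.

# Hypothetical sketch of the setup_inputs() helper referenced in the hunk
# above; it is not part of this change. The keyword names passed to
# sahara.data_sources.create() are assumed.
def setup_inputs(self, sahara, tenant_id, input_type, input_url):
    # Create one input Data Source per tenant and remember its id so that
    # EDP scenarios (and generic cleanup) can find it later.
    input_ds = sahara.data_sources.create(
        name=self.generate_random_name(),
        description="",
        data_source_type=input_type,
        url=input_url)
    self.context["tenants"][tenant_id]["sahara_input"] = input_ds.id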
@@ -0,0 +1,59 @@
+# Copyright 2015: Mirantis Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from rally.common.i18n import _
+from rally.common import log as logging
+from rally.common import utils as rutils
+from rally import consts
+from rally.task import context
+
+
+LOG = logging.getLogger(__name__)
+
+
+@context.configure(name="sahara_output_data_sources", order=444)
+class SaharaOutputDataSources(context.Context):
+    """Context class for setting up Output Data Sources for an EDP job."""
+
+    CONFIG_SCHEMA = {
+        "type": "object",
+        "$schema": consts.JSON_SCHEMA,
+        "properties": {
+            "output_type": {
+                "enum": ["swift", "hdfs"],
+            },
+            "output_url_prefix": {
+                "type": "string",
+            }
+        },
+        "additionalProperties": False,
+        "required": ["output_type", "output_url_prefix"]
+    }
+
+    @logging.log_task_wrapper(LOG.info,
+                              _("Enter context: `Sahara Output Data Sources`"))
+    def setup(self):
+        for user, tenant_id in rutils.iterate_per_tenants(
+                self.context["users"]):
+            self.context["tenants"][tenant_id]["sahara_output_conf"] = {
+                "output_type": self.config["output_type"],
+                "output_url_prefix": self.config["output_url_prefix"]}
+
+    @logging.log_task_wrapper(LOG.info, _("Exit context: `Sahara Output Data"
+                                          "Sources`"))
+    def cleanup(self):
+        # TODO(esikachev): Cleanup must iterate by output_url_prefix of
+        # resources from setup() and delete them
+        pass
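The cleanup() above is left as a TODO. A minimal sketch of what that cleanup could look like follows, assuming the saharaclient data_sources manager exposes list() and delete() and that osclients is imported in this module; none of that is part of this change.

# Hypothetical cleanup sketch for the TODO above: delete only the Data Sources
# whose URL starts with the configured output_url_prefix. The
# data_sources.list()/delete() calls and the osclients import are assumptions.
def cleanup(self):
    for user, tenant_id in rutils.iterate_per_tenants(
            self.context["users"]):
        sahara = osclients.Clients(user["endpoint"]).sahara()
        prefix = self.config["output_url_prefix"]
        for ds in sahara.data_sources.list():
            if ds.url.startswith(prefix):
                sahara.data_sources.delete(ds.id)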
@@ -35,10 +35,12 @@
             }
         ]
     },
-    "sahara_data_sources": {
+    "sahara_input_data_sources": {
         "input_type": "hdfs",
-        "input_url": "/",
-        "output_type": "hdfs",
+        "input_url": "/"
+    },
+    "sahara_output_data_sources": {
+        "output_type": "hdfs",
         "output_url_prefix": "/out_"
     },
     "sahara_cluster": {
@@ -26,10 +26,11 @@
       -
         name: "udf.jar"
         download_url: "https://github.com/openstack/sahara/blob/master/etc/edp-examples/pig-job/udf.jar?raw=true"
-  sahara_data_sources:
+  sahara_input_data_sources:
     input_type: "hdfs"
-    output_type: "hdfs"
     input_url: "/"
+  sahara_output_data_sources:
+    output_type: "hdfs"
     output_url_prefix: "/out_"
   sahara_cluster:
     flavor_id: "2"
@@ -14,17 +14,16 @@
 
 import mock
 
-from rally.plugins.openstack.context.sahara import sahara_data_sources
+from rally.plugins.openstack.context.sahara import sahara_input_data_sources
 from tests.unit import test
 
 
 CTX = "rally.plugins.openstack.context.sahara"
 
 
-class SaharaDataSourcesTestCase(test.ScenarioTestCase):
+class SaharaInputDataSourcesTestCase(test.ScenarioTestCase):
 
     def setUp(self):
-        super(SaharaDataSourcesTestCase, self).setUp()
+        super(SaharaInputDataSourcesTestCase, self).setUp()
         self.tenants_num = 2
         self.users_per_tenant = 2
         self.users = self.tenants_num * self.users_per_tenant
@@ -51,11 +50,9 @@ class SaharaDataSourcesTestCase(test.ScenarioTestCase):
                 "tenants": self.tenants_num,
                 "users_per_tenant": self.users_per_tenant,
             },
-            "sahara_data_sources": {
+            "sahara_input_data_sources": {
                 "input_type": "hdfs",
-                "output_type": "hdfs",
                 "input_url": "hdfs://test_host/",
-                "output_url_prefix": "hdfs://test_host/out_"
             },
         },
         "admin": {"endpoint": mock.MagicMock()},
@@ -63,14 +60,15 @@ class SaharaDataSourcesTestCase(test.ScenarioTestCase):
             "tenants": self.tenants
         })
 
-    @mock.patch("%s.sahara_data_sources.resource_manager.cleanup" % CTX)
-    @mock.patch("%s.sahara_data_sources.osclients" % CTX)
+    @mock.patch("%s.sahara_input_data_sources.resource_manager.cleanup" % CTX)
+    @mock.patch("%s.sahara_input_data_sources.osclients" % CTX)
     def test_setup_and_cleanup(self, mock_osclients, mock_cleanup):
 
         mock_sahara = mock_osclients.Clients(mock.MagicMock()).sahara()
         mock_sahara.data_sources.create.return_value = mock.MagicMock(id=42)
 
-        sahara_ctx = sahara_data_sources.SaharaDataSources(self.context)
+        sahara_ctx = sahara_input_data_sources.SaharaInputDataSources(
+            self.context)
         sahara_ctx.generate_random_name = mock.Mock()
 
         input_ds_crete_calls = []
@@ -84,7 +82,8 @@ class SaharaDataSourcesTestCase(test.ScenarioTestCase):
 
         sahara_ctx.setup()
 
-        mock_sahara.data_sources.create.assert_has_calls(input_ds_crete_calls)
+        mock_sahara.data_sources.create.assert_has_calls(
+            input_ds_crete_calls)
 
         sahara_ctx.cleanup()
@@ -0,0 +1,32 @@
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from rally.plugins.openstack.context.sahara import sahara_output_data_sources
+from tests.unit import test
+
+
+class SaharaOutputDataSourcesTestCase(test.TestCase):
+
+    def setUp(self):
+        super(SaharaOutputDataSourcesTestCase, self).setUp()
+
+    def check_setup(self):
+        context = sahara_output_data_sources.SaharaOutputDataSources.context[
+            "sahara_output_conf"]
+        self.assertIsNotNone(context.get("output_type"))
+        self.assertIsNotNone(context.get("output_url_prefix"))
+
+    def check_cleanup(self):
+        self.assertIsNone(
+            sahara_output_data_sources.SaharaOutputDataSources.cleanup())
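For reference, a hedged sketch of a directly runnable setup() test for the new context follows; the context dict it builds (task, users, tenants) is an illustrative assumption that mirrors the input-data-sources test above rather than anything added by this change.

# Hypothetical, directly runnable test sketch for
# SaharaOutputDataSources.setup(); the context contents are assumed.
import mock

from rally.plugins.openstack.context.sahara import sahara_output_data_sources
from tests.unit import test


class SaharaOutputDataSourcesSetupTestCase(test.TestCase):

    def test_setup_stores_output_conf_per_tenant(self):
        ctx = {
            "task": mock.MagicMock(),
            "config": {
                "sahara_output_data_sources": {
                    "output_type": "hdfs",
                    "output_url_prefix": "/out_",
                },
            },
            "users": [{"tenant_id": "t1", "endpoint": mock.MagicMock()}],
            "tenants": {"t1": {}},
        }
        sahara_ctx = sahara_output_data_sources.SaharaOutputDataSources(ctx)
        sahara_ctx.setup()

        # setup() should record the output configuration for each tenant.
        self.assertEqual(
            {"output_type": "hdfs", "output_url_prefix": "/out_"},
            ctx["tenants"]["t1"]["sahara_output_conf"])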