Add unit tests for the new scenario tests

Add unit tests for the scenario test framework, which currently has no
unit test coverage.

Partially implements bp: scenario-integration-tests

Change-Id: I2de9aad5abb58a9e0761ee506e650257fc773c75
Evgeny Sikachev 2015-02-12 16:24:12 +03:00
parent 919b184c11
commit 18a73207f7
6 changed files with 715 additions and 0 deletions
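
The new tests live under sahara/tests/scenario_unit and are wired into tox
through the py27-scenario-unit environment added in the tox.ini hunk at the
end of this change. Assuming a standard tox setup, a typical local run would
be:

    tox -e py27-scenario-unit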

View File

View File

@@ -0,0 +1,369 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
from saharaclient.api import cluster_templates
from saharaclient.api import clusters
from saharaclient.api import data_sources
from saharaclient.api import images
from saharaclient.api import job_binaries
from saharaclient.api import job_binary_internals
from saharaclient.api import job_executions
from saharaclient.api import jobs
from saharaclient.api import node_group_templates
from saharaclient.api import plugins
from tempest_lib import exceptions as exc
import testtools

from sahara.tests.scenario import base


class FakeSaharaClient(object):
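    """Minimal stand-in for the saharaclient Client object.

    The real saharaclient manager classes are instantiated with a None HTTP
    client, so the tests below can patch their create/get/update calls.
    """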
def __init__(self):
self.clusters = clusters.ClusterManager(None)
self.cluster_templates = cluster_templates.ClusterTemplateManager(None)
self.node_group_templates = (node_group_templates.
NodeGroupTemplateManager(None))
self.plugins = plugins.PluginManager(None)
self.images = images.ImageManager(None)
self.data_sources = data_sources.DataSourceManager(None)
self.jobs = jobs.JobsManager(None)
self.job_executions = job_executions.JobExecutionsManager(None)
self.job_binaries = job_binaries.JobBinariesManager(None)
self.job_binary_internals = (
job_binary_internals.JobBinaryInternalsManager(None))


class FakeResponse(object):
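    """Tiny response stub carrying only the id and status the tests check."""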
def __init__(self, set_id=None, set_status=None):
self.id = set_id
self.status = set_status


class TestBase(testtools.TestCase):
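    """Unit tests for scenario base.BaseTestCase with all clients mocked."""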
def setUp(self):
super(TestBase, self).setUp()
with mock.patch(
'sahara.tests.scenario.base.BaseTestCase.__init__'
) as mock_init:
mock_init.return_value = None
self.base_scenario = base.BaseTestCase()
self.base_scenario.credentials = {'os_username': 'admin',
'os_password': 'nova',
'os_tenant': 'admin',
'os_auth_url':
'http://localhost:5000/v2.0',
'sahara_url':
'http://sahara_host:8386/v1.1'}
self.base_scenario.plugin_opts = {'plugin_name': 'vanilla',
'hadoop_version': '2.6.0'}
self.base_scenario.network = {'type': 'neutron',
'private_network': 'changed_private',
'public_network': 'changed_public',
'auto_assignment_floating_ip': False}
self.base_scenario.testcase = {
'node_group_templates': [
{
'name': 'master',
'node_processes': ['namenode', 'oozie', 'resourcemanager'],
'flavor_id': '2'
},
{
'name': 'worker',
'node_processes': ['datanode', 'nodemanager'],
'flavor_id': '2'
}],
'cluster_template':
{
'name': 'test_name_ct',
'node_group_templates':
{
'master': 1,
'worker': 3
}
},
'retain_resources': True,
'image': 'image_name',
"edp_jobs_flow":
{
"test_flow":
[{
"type": "Pig",
"input_datasource":
{
"type": "swift",
"source": "etc/edp-examples/edp-pig/"
"top-todoers/data/input"
},
"output_datasource":
{
"type": "hdfs",
"destination": "/user/hadoop/edp-output"
},
"main_lib":
{
"type": "swift",
"source": "etc/edp-examples/edp-pig/"
"top-todoers/example.pig"
}
}],
},
}
self.base_scenario.ng_id_map = {'worker': 'set_id', 'master': 'set_id'}
self.base_scenario.ng_name_map = {}
self.base_scenario.template_path = ('sahara/tests/scenario/templates/'
'vanilla/2.6.0')
self.job = self.base_scenario.testcase["edp_jobs_flow"].get(
'test_flow')[0]
self.base_scenario.setUpClass()
@mock.patch('saharaclient.client.Client', return_value=None)
@mock.patch('novaclient.client.Client', return_value=None)
@mock.patch('neutronclient.neutron.client.Client', return_value=None)
@mock.patch('swiftclient.client.Connection', return_value=None)
def test__init_clients(self, swift, neutron, nova, sahara):
self.base_scenario._init_clients()
sahara.assert_called_with('1.1',
username='admin',
api_key='nova',
project_name='admin',
auth_url='http://localhost:5000/v2.0',
sahara_url='http://sahara_host:8386/v1.1')
nova.assert_called_with('2',
username='admin',
api_key='nova',
project_id='admin',
auth_url='http://localhost:5000/v2.0')
neutron.assert_called_with('2.0',
username='admin',
password='nova',
tenant_name='admin',
auth_url='http://localhost:5000/v2.0')
swift.assert_called_with(auth_version='2.0',
user='admin',
key='nova',
tenant_name='admin',
authurl='http://localhost:5000/v2.0')
@mock.patch('sahara.tests.scenario.clients.NeutronClient.get_network_id',
return_value='mock_net')
@mock.patch('saharaclient.client.Client',
return_value=FakeSaharaClient())
@mock.patch('saharaclient.api.node_group_templates.'
'NodeGroupTemplateManager.create',
return_value=FakeResponse(set_id='id_ng'))
def test__create_node_group_template(self, mock_ng, mock_saharaclient,
mock_neutron):
self.base_scenario._init_clients()
self.assertEqual({'worker': 'id_ng', 'master': 'id_ng'},
self.base_scenario._create_node_group_templates())
@mock.patch('sahara.tests.scenario.clients.NeutronClient.get_network_id',
return_value='mock_net')
@mock.patch('saharaclient.client.Client', return_value=FakeSaharaClient())
@mock.patch('saharaclient.api.cluster_templates.'
'ClusterTemplateManager.create',
return_value=FakeResponse(set_id='id_ct'))
def test__create_cluster_template(self, mock_ct, mock_saharaclient,
mock_neutron):
self.base_scenario._init_clients()
self.assertEqual('id_ct',
self.base_scenario._create_cluster_template())
@mock.patch('sahara.tests.scenario.clients.NovaClient.get_image_id',
return_value='mock_image')
@mock.patch('saharaclient.client.Client', return_value=FakeSaharaClient())
@mock.patch('saharaclient.api.clusters.ClusterManager.create',
return_value=FakeResponse(set_id='id_cluster'))
def test__create_cluster(self, mock_cluster_manager, mock_saharaclient,
mock_nova):
self.base_scenario._init_clients()
self.assertEqual('id_cluster',
self.base_scenario._create_cluster('id_ct'))
@mock.patch('saharaclient.client.Client', return_value=FakeSaharaClient())
@mock.patch('sahara.tests.scenario.clients.NeutronClient.get_network_id',
return_value='mock_net')
@mock.patch('saharaclient.api.base.ResourceManager._get',
return_value=FakeResponse(set_status='Active'))
def test__poll_cluster_status(self, mock_status, mock_neutron,
mock_saharaclient):
self.base_scenario._init_clients()
self.assertIsNone(
self.base_scenario._poll_cluster_status('id_cluster'))
@mock.patch('saharaclient.api.base.ResourceManager._update',
return_value=FakeResponse(set_id='id_internal_db_data'))
@mock.patch('saharaclient.client.Client', return_value=FakeSaharaClient())
def test__create_internal_db_data(self, mock_saharaclient, mock_update):
self.base_scenario._init_clients()
self.assertEqual('internal-db://id_internal_db_data',
self.base_scenario._create_internal_db_data(
'sahara/tests/scenario_unit/vanilla2_6_0.yaml'))
@mock.patch('swiftclient.client.Connection.put_container',
return_value=None)
@mock.patch('saharaclient.client.Client', return_value=FakeSaharaClient())
def test__create_swift_data(self, mock_saharaclient, mock_swiftclient):
self.base_scenario._init_clients()
self.assertTrue('swift://sahara-tests-' in
self.base_scenario._create_swift_data())
@mock.patch('swiftclient.client.Connection.put_container',
return_value=None)
@mock.patch('saharaclient.client.Client', return_value=FakeSaharaClient())
def test__get_swift_container(self, mock_saharaclient,
mock_swiftclient):
self.base_scenario._init_clients()
self.assertTrue('sahara-tests-' in
self.base_scenario._get_swift_container())
@mock.patch('saharaclient.api.base.ResourceManager._create',
return_value=FakeResponse(set_id='id_for_datasource'))
@mock.patch('swiftclient.client.Connection.put_container',
return_value=None)
@mock.patch('swiftclient.client.Connection.put_object',
return_value=None)
@mock.patch('saharaclient.client.Client', return_value=FakeSaharaClient())
def test__create_datasources(self, mock_saharaclient,
mock_swiftcontainer, mock_swiftobject,
mock_create):
self.base_scenario._init_clients()
self.assertEqual(('id_for_datasource', 'id_for_datasource'),
self.base_scenario._create_datasources(
self.job))
@mock.patch('saharaclient.api.base.ResourceManager._create',
return_value=FakeResponse(set_id='id_for_job_binaries'))
@mock.patch('swiftclient.client.Connection.put_object',
return_value=None)
@mock.patch('swiftclient.client.Connection.put_container',
return_value=None)
@mock.patch('saharaclient.client.Client', return_value=FakeSaharaClient())
    def test__create_job_binaries(self, mock_saharaclient,
                                  mock_swiftcontainer,
                                  mock_swiftobject,
                                  mock_sahara_create):
self.base_scenario._init_clients()
self.assertEqual((['id_for_job_binaries'], []),
self.base_scenario._create_job_binaries(
self.job))
@mock.patch('saharaclient.api.base.ResourceManager._create',
return_value=FakeResponse(set_id='id_for_job_binary'))
@mock.patch('swiftclient.client.Connection.put_object',
return_value=None)
@mock.patch('swiftclient.client.Connection.put_container',
return_value=None)
@mock.patch('saharaclient.client.Client', return_value=FakeSaharaClient())
    def test__create_job_binary(self, mock_saharaclient,
                                mock_swiftcontainer, mock_swiftobject,
                                mock_sahara_create):
self.base_scenario._init_clients()
self.assertEqual('id_for_job_binary',
self.base_scenario._create_job_binary(self.job.get(
'input_datasource')))
@mock.patch('saharaclient.api.base.ResourceManager._create',
return_value=FakeResponse(set_id='id_for_job'))
@mock.patch('saharaclient.client.Client', return_value=FakeSaharaClient())
def test__create_job(self, mock_client, mock_sahara_client):
self.base_scenario._init_clients()
self.assertEqual('id_for_job',
self.base_scenario._create_job(
'Pig',
['id_for_job_binaries'],
[]))
@mock.patch('sahara.tests.scenario.clients.SaharaClient.get_job_status',
return_value='SUCCEEDED')
@mock.patch('saharaclient.api.base.ResourceManager._get',
return_value=FakeResponse(set_id='id_for_run_job_get'))
@mock.patch('saharaclient.api.base.ResourceManager._create',
return_value=FakeResponse(set_id='id_for_run_job_create'))
@mock.patch('sahara.tests.scenario.base.BaseTestCase.'
'_poll_cluster_status',
return_value=None)
@mock.patch('sahara.tests.scenario.base.BaseTestCase.'
'_create_node_group_templates',
return_value='id_node_group_template')
@mock.patch('sahara.tests.scenario.base.BaseTestCase.'
'_create_cluster_template',
return_value='id_cluster_template')
@mock.patch('sahara.tests.scenario.base.BaseTestCase._create_cluster',
return_value='id_cluster')
@mock.patch('sahara.tests.scenario.base.BaseTestCase._create_job',
return_value='id_for_job')
@mock.patch('sahara.tests.scenario.base.BaseTestCase._create_job_binaries',
return_value=(['id_for_job_binaries'], []))
@mock.patch('sahara.tests.scenario.base.BaseTestCase._create_datasources',
return_value=('id_for_datasource', 'id_for_datasource'))
@mock.patch('saharaclient.client.Client', return_value=FakeSaharaClient())
def test_check_run_jobs(self, mock_saharaclient, mock_datasources,
mock_job_binaries, mock_job,
mock_node_group_template, mock_cluster_template,
mock_cluster, mock_cluster_status, mock_create,
mock_get, mock_client):
self.base_scenario._init_clients()
self.base_scenario.create_cluster()
self.base_scenario.testcase["edp_jobs_flow"] = [
{
"type": "Pig",
"input_datasource": {
"type": "swift",
"source": "etc/edp-examples/edp-pig/top-todoers/"
"data/input"
},
"output_datasource": {
"type": "hdfs",
"destination": "/user/hadoop/edp-output"
},
"main_lib": {
"type": "swift",
"source": "etc/edp-examples/edp-pig/top-todoers/"
"example.pig"
}
}
]
self.assertIsNone(self.base_scenario.check_run_jobs())
@mock.patch('sahara.tests.scenario.base.BaseTestCase.'
'_poll_cluster_status',
return_value=None)
@mock.patch('saharaclient.api.base.ResourceManager._get',
return_value=FakeResponse(set_id='id_scale_get'))
@mock.patch('saharaclient.api.base.ResourceManager._update',
return_value=FakeResponse(set_id='id_scale_update'))
@mock.patch('saharaclient.client.Client', return_value=FakeSaharaClient())
def test_check_scale(self, mock_saharaclient, mock_update, mock_get,
mock_poll):
self.base_scenario._init_clients()
self.base_scenario.ng_id_map = {'vanilla-worker': 'set_id-w',
'vanilla-master': 'set_id-m'}
self.base_scenario.ng_name_map = {'vanilla-worker': 'worker-123',
'vanilla-master': 'master-321'}
self.base_scenario.cluster_id = 'cluster_id'
self.assertIsNone(self.base_scenario.check_scale())
@mock.patch('saharaclient.client.Client', return_value=FakeSaharaClient())
@mock.patch('sahara.tests.scenario.clients.NeutronClient.get_network_id',
return_value='mock_net')
@mock.patch('saharaclient.api.base.ResourceManager._get',
return_value=FakeResponse(set_status='Error'))
def test_errormsg(self, mock_status, mock_neutron, mock_saharaclient):
self.base_scenario._init_clients()
with testtools.ExpectedException(exc.TempestException):
self.base_scenario._poll_cluster_status('id_cluster')

View File

@@ -0,0 +1,216 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

import mock
import testtools

from sahara.tests.scenario import runner


class RunnerUnitTest(testtools.TestCase):
def _isDictContainSubset(self, sub_dictionary, dictionary):
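        """Check that every key of sub_dictionary maps to the same value
        in dictionary.
        """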
for key in sub_dictionary:
if sub_dictionary[key] != dictionary[key]:
return False
return True
def test_set_defaults(self):
config_without_cred_net = {
"clusters": [
{
"plugin_name": "vanilla",
"plugin_version": "2.6.0",
"image": "sahara-vanilla-2.6.0-ubuntu-14.04"
}],
}
expected_default_credential = {
"credentials": {
"os_username": "admin",
"os_auth_url": "http://localhost:5000/v2.0",
"sahara_url": None,
"os_password": "nova",
"os_tenant": "admin"
}
}
expected_default_network = {
"network": {
"type": "neutron",
"private_network": "private",
"public_network": "public",
"auto_assignment_floating_ip": False
},
}
expected_default_cluster = {
"clusters": [
{
"image": "sahara-vanilla-2.6.0-ubuntu-14.04",
"edp_jobs_flow": None,
"class_name": "vanilla2_6_0",
"plugin_name": "vanilla",
"scenario": ['run_jobs', 'scale', 'run_jobs'],
"plugin_version": "2.6.0",
"retain_resources": False
}],
}
runner.set_defaults(config_without_cred_net)
self.assertTrue(self._isDictContainSubset(
expected_default_credential, config_without_cred_net))
self.assertTrue(self._isDictContainSubset(
expected_default_network, config_without_cred_net))
self.assertTrue(self._isDictContainSubset(
expected_default_cluster, config_without_cred_net))
config = {
"credentials": {
"os_username": "changed_admin",
"os_auth_url": "http://127.0.0.1:5000/v2.0",
"sahara_url": "http://127.0.0.1",
"os_password": "changed_nova",
"os_tenant": "changed_admin"
},
"network": {
"type": "neutron",
"private_network": "changed_private",
"public_network": "changed_public",
"auto_assignment_floating_ip": True,
},
"clusters": [
{
"plugin_name": "vanilla",
"plugin_version": "2.6.0",
"image": "sahara-vanilla-2.6.0-ubuntu-14.04",
"edp_jobs_flow": "test_flow",
"retain_resources": True
}],
"edp_jobs_flow": {
"test_flow": [
{
"type": "Pig",
"input_datasource": {
"type": "swift",
"source": "etc/edp-examples/edp-pig/top-todoers/"
"data/input"
},
"output_datasource": {
"type": "hdfs",
"destination": "/user/hadoop/edp-output"
},
"main_lib": {
"type": "swift",
"source": "etc/edp-examples/edp-pig/top-todoers/"
"example.pig"
}
},
{
"type": "Java",
"additional_libs": [
{
"type": "database",
"source": "sahara/tests/integration/tests/"
"resources/"
}],
"configs": "edp.java.main_class: org.apache.hadoop."
"examples.QuasiMonteCarlo",
"args": [10, 10]
},
],
},
}
expected_credential = {
"credentials": {
"os_username": "changed_admin",
"os_auth_url": "http://127.0.0.1:5000/v2.0",
"sahara_url": "http://127.0.0.1",
"os_password": "changed_nova",
"os_tenant": "changed_admin"
},
}
expected_network = {
"network": {
"type": "neutron",
"private_network": "changed_private",
"public_network": "changed_public",
"auto_assignment_floating_ip": True,
},
}
expected_cluster = {
"clusters": [
{
"plugin_name": "vanilla",
"plugin_version": "2.6.0",
"image": "sahara-vanilla-2.6.0-ubuntu-14.04",
"retain_resources": True,
'edp_jobs_flow': [
{
'main_lib': {
'source': 'etc/edp-examples/edp-pig/'
'top-todoers/example.pig',
'type': 'swift'
},
'type': 'Pig',
'input_datasource': {
'source': 'etc/edp-examples/edp-pig/'
'top-todoers/data/input',
'type': 'swift'
},
'output_datasource': {
'type': 'hdfs',
'destination': '/user/hadoop/edp-output'
}
},
{
'args': [10, 10],
'configs': 'edp.java.main_class: org.apache.'
'hadoop.examples.QuasiMonteCarlo',
'type': 'Java',
'additional_libs': [
{
'source': 'sahara/tests/integration/'
'tests/resources/',
'type': 'database'
}]
}
],
"scenario": ['run_jobs', 'scale', 'run_jobs'],
"class_name": "vanilla2_6_0"
}],
}
runner.set_defaults(config)
self.assertTrue(self._isDictContainSubset(
expected_credential, config))
self.assertTrue(self._isDictContainSubset(
expected_network, config))
self.assertTrue(self._isDictContainSubset(
expected_cluster, config))
@mock.patch('sys.exit', return_value=None)
@mock.patch('os.system', return_value=None)
def test_runner_main(self, mock_os, mock_sys):
sys.argv = ['sahara/tests/scenario/runner.py',
'sahara/tests/scenario_unit/vanilla2_6_0.yaml']
runner.main()

View File

@@ -0,0 +1,27 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import testtools
import yaml

from sahara.tests.scenario import validation


class TestValidation(testtools.TestCase):
def test_validation(self):
with open("sahara/tests/scenario_unit/vanilla2_6_0.yaml",
"r") as yaml_file:
config = yaml.load(yaml_file)
self.assertIsNone(validation.validate(config))

View File

@@ -0,0 +1,97 @@
concurrency: 1
credentials:
os_username: admin
os_password: nova
os_tenant: admin
os_auth_url: http://127.0.0.1:5000/v2.0
network:
private_network: private
public_network: public
clusters:
- plugin_name: vanilla
plugin_version: 2.6.0
image: sahara-vanilla-2.6.0-ubuntu-14.04
node_group_templates:
- name: master
node_processes:
- namenode
- resourcemanager
- hiveserver
- oozie
- historyserver
- secondarynamenode
flavor_id: '2'
- name: worker
node_processes:
- datanode
- nodemanager
flavor_id: '2'
cluster_template:
name: vanilla
node_group_templates:
master: 1
worker: 3
scenario:
- run_jobs
- scale
- run_jobs
edp_jobs_flow: test_flow
retain_resources: true
edp_jobs_flow:
test_flow:
- type: Pig
input_datasource:
type: swift
source: etc/edp-examples/edp-pig/top-todoers/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
main_lib:
type: swift
source: etc/edp-examples/edp-pig/top-todoers/example.pig
- type: Java
additional_libs:
- type: database
source: etc/edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.4.1.jar
configs:
edp.java.main_class: org.apache.hadoop.examples.QuasiMonteCarlo
args:
- 10
- 10
- type: MapReduce
configs:
mapred.mapper.class: org.apache.oozie.example.SampleMapper
mapred.reducer.class: org.apache.oozie.example.SampleReducer
additional_libs:
- type: database
source: etc/edp-examples/edp-java/edp-java.jar
input_datasource:
type: swift
source: etc/edp-examples/edp-pig/top-todoers/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
- type: MapReduce.Streaming
configs:
edp.streaming.mapper: /bin/cat
edp.streaming.reducer: /usr/bin/wc
input_datasource:
type: swift
source: etc/edp-examples/edp-pig/top-todoers/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
- type: Hive
input_datasource:
type: swift
source: etc/edp-examples/edp-hive/input.csv
output_datasource:
type: hdfs
destination: /user/hadoop/edp-hive/
main_lib:
type: swift
source: etc/edp-examples/edp-hive/script.q

View File

@@ -25,6 +25,12 @@ commands = bash tools/pretty_tox.sh '{posargs}'
setenv = VIRTUALENV={envdir}
commands = python {toxinidir}/sahara/tests/scenario/runner.py {posargs}

[testenv:py27-scenario-unit]
setenv =
VIRTUAL_ENV={envdir}
DISCOVER_DIRECTORY=sahara/tests/scenario_unit
commands = bash tools/pretty_tox.sh '{posargs}'

[testenv:cover]
commands = python setup.py testr --coverage --testr-args='{posargs}'