Added integration tests with Docker (#17)
* Added integration tests with Docker * Cleanup more data
This commit is contained in:
parent
a81c0554b5
commit
6b6d25124f
6
Dockerfile.integration-tests
Normal file
6
Dockerfile.integration-tests
Normal file
@ -0,0 +1,6 @@
|
||||
FROM themattrix/tox
|
||||
|
||||
COPY tox-integration.ini /app/tox.ini
|
||||
COPY tests /app/tests
|
||||
COPY integration_tests /app/integration_tests
|
||||
COPY integration-test-requirements.txt /app/integration-test-requirements.txt
|
46
docker-compose-integration-tests.yml
Normal file
46
docker-compose-integration-tests.yml
Normal file
@ -0,0 +1,46 @@
|
||||
version: '2'
|
||||
services:
|
||||
api:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
command: api
|
||||
depends_on:
|
||||
- messaging
|
||||
- database
|
||||
environment:
|
||||
MONGODB_URL: mongodb://database:27017/almanach
|
||||
ports:
|
||||
- "80:8000"
|
||||
collector:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
command: collector
|
||||
depends_on:
|
||||
- database
|
||||
- messaging
|
||||
environment:
|
||||
MONGODB_URL: mongodb://database:27017/almanach
|
||||
RABBITMQ_URL: amqp://guest:guest@messaging:5672
|
||||
messaging:
|
||||
image: rabbitmq
|
||||
ports:
|
||||
- "5672:5672"
|
||||
database:
|
||||
image: mongo
|
||||
ports:
|
||||
- "27017:27017"
|
||||
test:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile.integration-tests
|
||||
args:
|
||||
SKIP_TOX: "true"
|
||||
environment:
|
||||
TEST_CONTAINER: "true"
|
||||
depends_on:
|
||||
- api
|
||||
- collector
|
||||
- database
|
||||
- messaging
|
8
integration-test-requirements.txt
Normal file
8
integration-test-requirements.txt
Normal file
@ -0,0 +1,8 @@
|
||||
nose==1.2.1
|
||||
requests==2.10.0
|
||||
kombu>=3.0.21
|
||||
python-dateutil==2.2
|
||||
pymongo==3.2.2
|
||||
pytz>=2014.10
|
||||
PyHamcrest==1.8.5
|
||||
retry>=0.9.2
|
0
integration_tests/__init__.py
Normal file
0
integration_tests/__init__.py
Normal file
42
integration_tests/base_api_testcase.py
Normal file
42
integration_tests/base_api_testcase.py
Normal file
@ -0,0 +1,42 @@
|
||||
# Copyright 2016 Internap.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest
|
||||
|
||||
from uuid import uuid4
|
||||
from hamcrest import equal_to, assert_that
|
||||
|
||||
from helpers.rabbit_mq_helper import RabbitMqHelper
|
||||
from helpers.almanach_helper import AlmanachHelper
|
||||
|
||||
|
||||
class BaseApiTestCase(unittest.TestCase):
    """Shared base for API integration tests.

    Exposes class-level helpers for talking to the Almanach API and to
    RabbitMQ; they are created once when the class body is executed.
    """

    rabbitMqHelper = RabbitMqHelper()
    almanachHelper = AlmanachHelper()

    def _create_instance_entity(self):
        """Create an instance entity through the API and return its new id.

        Asserts that the API acknowledges the creation with HTTP 201.
        """
        project_id = "my_test_project_id"
        instance_id = str(uuid4())
        payload = {
            'id': instance_id,
            'created_at': '2016-01-01T18:30:00Z',
            'name': 'integration_test_instance_FlavorA',
            'flavor': 'FlavorA',
            'os_type': 'FreeBSD',
            'os_distro': 'Stable',
            'os_version': '10',
        }
        response = self.almanachHelper.post(url="{url}/project/{project}/instance", data=payload, project=project_id)
        assert_that(response.status_code, equal_to(201))
        return instance_id
|
0
integration_tests/builders/__init__.py
Normal file
0
integration_tests/builders/__init__.py
Normal file
429
integration_tests/builders/messages.py
Normal file
429
integration_tests/builders/messages.py
Normal file
@ -0,0 +1,429 @@
|
||||
# Copyright 2016 Internap.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import dateutil.parser
|
||||
import pytz
|
||||
|
||||
DEFAULT_VOLUME_TYPE = "my_block_storage_type"
|
||||
DEFAULT_VOLUME_NAME = "my_block_storage_name"
|
||||
|
||||
|
||||
def get_instance_create_end_sample(instance_id=None, tenant_id=None, flavor_name=None,
                                   creation_timestamp=None, name=None, os_distro=None, os_version=None, metadata=None):
    """Build a sample compute.instance.create.end notification.

    Every argument is optional; unspecified fields fall back to fixed
    sample values. The message timestamp is 200 ms after launched_at.
    Fixes: `metadata` used a shared mutable default (`{}`); the seconds
    literal `02` was an octal literal (a syntax error in Python 3).
    """
    kwargs = {
        "instance_id": instance_id or "e7d44dea-21c1-452c-b50c-cbab0d07d7d3",
        "tenant_id": tenant_id or "0be9215b503b43279ae585d50a33aed8",
        "hostname": name or "to.to",
        "display_name": name or "to.to",
        "instance_type": flavor_name or "myflavor",
        "os_distro": os_distro or "CentOS",
        "os_version": os_version or "6.4",
        "created_at": creation_timestamp if creation_timestamp else datetime(2014, 2, 14, 16, 29, 58, tzinfo=pytz.utc),
        # launched_at is one second after the (explicit) creation time.
        "launched_at": creation_timestamp + timedelta(seconds=1) if creation_timestamp
        else datetime(2014, 2, 14, 16, 30, 2, tzinfo=pytz.utc),
        "terminated_at": None,
        "deleted_at": None,
        "state": "active",
        # None sentinel instead of a mutable default argument.
        "metadata": metadata if metadata is not None else {}
    }
    kwargs["timestamp"] = kwargs["launched_at"] + timedelta(microseconds=200000)
    return _get_instance_payload("compute.instance.create.end", **kwargs)
|
||||
|
||||
|
||||
def get_instance_delete_end_sample(instance_id=None, tenant_id=None, flavor_name=None, os_distro=None, os_version=None,
                                   creation_timestamp=None, deletion_timestamp=None, name=None):
    """Build a sample compute.instance.delete.end notification.

    Unspecified fields fall back to fixed sample values; the message
    timestamp is 200 ms after terminated_at.
    Fix: the seconds literal `02` was an octal literal (a syntax error
    in Python 3); `2` has the same value.
    """
    kwargs = {
        "instance_id": instance_id,
        "tenant_id": tenant_id,
        "hostname": name,
        "display_name": name,
        "instance_type": flavor_name,
        "os_distro": os_distro or "centos",
        "os_version": os_version or "6.4",
        "created_at": creation_timestamp if creation_timestamp else datetime(2014, 2, 14, 16, 29, 58, tzinfo=pytz.utc),
        "launched_at": creation_timestamp + timedelta(seconds=1) if creation_timestamp
        else datetime(2014, 2, 14, 16, 30, 2, tzinfo=pytz.utc),
        "terminated_at": deletion_timestamp if deletion_timestamp else datetime(2014, 2, 18, 12, 5, 23,
                                                                                tzinfo=pytz.utc),
        "deleted_at": deletion_timestamp if deletion_timestamp else datetime(2014, 2, 18, 12, 5, 23, tzinfo=pytz.utc),
        "state": "deleted"
    }
    kwargs["timestamp"] = kwargs["terminated_at"] + timedelta(microseconds=200000)
    return _get_instance_payload("compute.instance.delete.end", **kwargs)
|
||||
|
||||
|
||||
def get_volume_create_end_sample(volume_id=None, tenant_id=None, volume_type=None, volume_size=None,
                                 creation_timestamp=None, name=None):
    """Build a sample volume.create.end notification (icehouse format)."""
    launched = creation_timestamp + timedelta(seconds=1) if creation_timestamp \
        else datetime(2014, 2, 14, 17, 18, 40, tzinfo=pytz.utc)
    return _get_volume_icehouse_payload(
        "volume.create.end",
        volume_id=volume_id or "64a0ca7f-5f5a-4dc5-a1e1-e04e89eb95ed",
        tenant_id=tenant_id or "46eeb8e44298460899cf4b3554bfe11f",
        display_name=name or "mytenant-0001-myvolume",
        volume_type=volume_type or DEFAULT_VOLUME_TYPE,
        volume_size=volume_size or 50,
        created_at=creation_timestamp if creation_timestamp else datetime(2014, 2, 14, 17, 18, 35, tzinfo=pytz.utc),
        launched_at=launched,
        # The message timestamp is 200 ms after the launch time.
        timestamp=launched + timedelta(microseconds=200000),
        status="available",
    )
|
||||
|
||||
|
||||
def get_volume_delete_end_sample(volume_id=None, tenant_id=None, volume_type=None, volume_size=None,
                                 creation_timestamp=None, deletion_timestamp=None, name=None):
    """Build a sample volume.delete.end notification (icehouse format)."""
    return _get_volume_icehouse_payload(
        "volume.delete.end",
        volume_id=volume_id or "64a0ca7f-5f5a-4dc5-a1e1-e04e89eb95ed",
        tenant_id=tenant_id or "46eeb8e44298460899cf4b3554bfe11f",
        display_name=name or "mytenant-0001-myvolume",
        volume_type=volume_type or DEFAULT_VOLUME_TYPE,
        volume_size=volume_size or 50,
        created_at=creation_timestamp if creation_timestamp else datetime(2014, 2, 14, 17, 18, 35, tzinfo=pytz.utc),
        launched_at=deletion_timestamp if deletion_timestamp else datetime(2014, 2, 14, 17, 18, 40, tzinfo=pytz.utc),
        timestamp=deletion_timestamp if deletion_timestamp else datetime(2014, 2, 23, 8, 1, 58, tzinfo=pytz.utc),
        status="deleting",
    )
|
||||
|
||||
|
||||
def get_volume_attach_icehouse_end_sample(volume_id=None, tenant_id=None, volume_type=None, volume_size=None,
                                          creation_timestamp=None, name=None, attached_to=None):
    """Build a sample volume.attach.end notification (icehouse format)."""
    # Both launched_at and the message timestamp are one second after the
    # (explicit) creation time, with the same fallback default.
    launched = creation_timestamp + timedelta(seconds=1) if creation_timestamp \
        else datetime(2014, 2, 14, 17, 18, 40, tzinfo=pytz.utc)
    return _get_volume_icehouse_payload(
        "volume.attach.end",
        volume_id=volume_id or "64a0ca7f-5f5a-4dc5-a1e1-e04e89eb95ed",
        tenant_id=tenant_id or "46eeb8e44298460899cf4b3554bfe11f",
        display_name=name or "mytenant-0001-myvolume",
        volume_type=volume_type or DEFAULT_VOLUME_TYPE,
        volume_size=volume_size or 50,
        attached_to=attached_to or "e7d44dea-21c1-452c-b50c-cbab0d07d7d3",
        created_at=creation_timestamp if creation_timestamp else datetime(2014, 2, 14, 17, 18, 35, tzinfo=pytz.utc),
        launched_at=launched,
        timestamp=launched,
    )
|
||||
|
||||
|
||||
def get_volume_attach_kilo_end_sample(volume_id=None, tenant_id=None, volume_type=None, volume_size=None,
                                      timestamp=None, name=None, attached_to=None):
    """Build a sample volume.attach.end notification (kilo format)."""
    event_time = timestamp + timedelta(seconds=1) if timestamp \
        else datetime(2014, 2, 14, 17, 18, 40, tzinfo=pytz.utc)
    return _get_volume_kilo_payload(
        "volume.attach.end",
        volume_id=volume_id or "64a0ca7f-5f5a-4dc5-a1e1-e04e89eb95ed",
        tenant_id=tenant_id or "46eeb8e44298460899cf4b3554bfe11f",
        display_name=name or "mytenant-0001-myvolume",
        volume_type=volume_type or DEFAULT_VOLUME_TYPE,
        volume_size=volume_size or 50,
        attached_to=attached_to,
        timestamp=event_time,
    )
|
||||
|
||||
|
||||
def get_volume_detach_kilo_end_sample(volume_id=None, tenant_id=None, volume_type=None, volume_size=None,
                                      timestamp=None, name=None, attached_to=None):
    """Build a sample volume.detach.end notification (kilo format)."""
    event_time = timestamp + timedelta(seconds=1) if timestamp \
        else datetime(2014, 2, 14, 17, 18, 40, tzinfo=pytz.utc)
    return _get_volume_kilo_payload(
        "volume.detach.end",
        volume_id=volume_id or "64a0ca7f-5f5a-4dc5-a1e1-e04e89eb95ed",
        tenant_id=tenant_id or "46eeb8e44298460899cf4b3554bfe11f",
        display_name=name or "mytenant-0001-myvolume",
        volume_type=volume_type or DEFAULT_VOLUME_TYPE,
        volume_size=volume_size or 50,
        attached_to=attached_to,
        timestamp=event_time,
    )
|
||||
|
||||
|
||||
def get_volume_detach_end_sample(volume_id=None, tenant_id=None, volume_type=None, volume_size=None,
                                 creation_timestamp=None, deletion_timestamp=None, name=None):
    """Build a sample volume.detach.end notification (icehouse format)."""
    return _get_volume_icehouse_payload(
        "volume.detach.end",
        volume_id=volume_id or "64a0ca7f-5f5a-4dc5-a1e1-e04e89eb95ed",
        tenant_id=tenant_id or "46eeb8e44298460899cf4b3554bfe11f",
        display_name=name or "mytenant-0001-myvolume",
        volume_type=volume_type or DEFAULT_VOLUME_TYPE,
        volume_size=volume_size or 50,
        attached_to=None,
        created_at=creation_timestamp if creation_timestamp else datetime(2014, 2, 14, 17, 18, 35, tzinfo=pytz.utc),
        launched_at=creation_timestamp + timedelta(seconds=1) if creation_timestamp
        else datetime(2014, 2, 14, 17, 18, 40, tzinfo=pytz.utc),
        timestamp=deletion_timestamp if deletion_timestamp else datetime(2014, 2, 23, 8, 1, 58, tzinfo=pytz.utc),
        status="detach",
    )
|
||||
|
||||
|
||||
def get_volume_rename_end_sample(volume_id=None, tenant_id=None, volume_type=None, volume_size=None,
                                 creation_timestamp=None, deletion_timestamp=None, name=None):
    """Build a sample volume.update.end notification (used for renames)."""
    return _get_volume_icehouse_payload(
        "volume.update.end",
        volume_id=volume_id or "64a0ca7f-5f5a-4dc5-a1e1-e04e89eb95ed",
        tenant_id=tenant_id or "46eeb8e44298460899cf4b3554bfe11f",
        display_name=name or "mytenant-0001-mysnapshot01",
        volume_type=volume_type or DEFAULT_VOLUME_TYPE,
        volume_size=volume_size or 50,
        attached_to=None,
        created_at=creation_timestamp if creation_timestamp else datetime(2014, 2, 14, 17, 18, 35, tzinfo=pytz.utc),
        launched_at=creation_timestamp + timedelta(seconds=1) if creation_timestamp
        else datetime(2014, 2, 14, 17, 18, 40, tzinfo=pytz.utc),
        timestamp=deletion_timestamp if deletion_timestamp else datetime(2014, 2, 23, 8, 1, 58, tzinfo=pytz.utc),
        status="detach",
    )
|
||||
|
||||
|
||||
def get_volume_exists_sample(volume_id=None, tenant_id=None, volume_type=None, volume_size=None,
                             creation_timestamp=None, deletion_timestamp=None, name=None):
    """Build a sample volume.exists audit notification (icehouse format)."""
    return _get_volume_icehouse_payload(
        "volume.exists",
        volume_id=volume_id or "64a0ca7f-5f5a-4dc5-a1e1-e04e89eb95ed",
        tenant_id=tenant_id or "46eeb8e44298460899cf4b3554bfe11f",
        display_name=name or "mytenant-0001-mysnapshot",
        volume_type=volume_type or DEFAULT_VOLUME_TYPE,
        volume_size=volume_size or 50,
        attached_to=None,
        created_at=creation_timestamp if creation_timestamp else datetime(2014, 2, 14, 17, 18, 35, tzinfo=pytz.utc),
        launched_at=creation_timestamp + timedelta(seconds=1) if creation_timestamp
        else datetime(2014, 2, 14, 17, 18, 40, tzinfo=pytz.utc),
        timestamp=deletion_timestamp if deletion_timestamp else datetime(2014, 2, 23, 8, 1, 58, tzinfo=pytz.utc),
        status="detach",
    )
|
||||
|
||||
|
||||
def _format_date(datetime_obj):
|
||||
return datetime_obj.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
|
||||
|
||||
|
||||
def _get_instance_payload(event_type, instance_id=None, tenant_id=None, hostname=None, display_name=None,
                          instance_type=None,
                          instance_flavor_id=None, timestamp=None, created_at=None, launched_at=None,
                          deleted_at=None, terminated_at=None, state=None, os_type=None, os_distro=None,
                          os_version=None, metadata=None):
    """Build a raw compute notification message of the given event type.

    Datetime arguments are formatted with _format_date; string timestamps
    are parsed back to datetime for the top-level message fields. All
    unspecified fields fall back to fixed sample values.
    Fix: `metadata` used a shared mutable default argument (`{}`);
    replaced with a None sentinel.
    """
    instance_id = instance_id or "e7d44dea-21c1-452c-b50c-cbab0d07d7d3"
    os_type = os_type or "linux"
    os_distro = os_distro or "centos"
    os_version = os_version or "6.4"
    hostname = hostname or "to.to"
    display_name = display_name or "to.to"
    tenant_id = tenant_id or "0be9215b503b43279ae585d50a33aed8"
    instance_type = instance_type or "myflavor"
    instance_flavor_id = instance_flavor_id or "201"
    timestamp = timestamp if timestamp else "2014-02-14T16:30:10.453532Z"
    created_at = _format_date(created_at) if created_at else "2014-02-14T16:29:58.000000Z"
    launched_at = _format_date(launched_at) if launched_at else "2014-02-14T16:30:10.221171Z"
    deleted_at = _format_date(deleted_at) if deleted_at else ""
    terminated_at = _format_date(terminated_at) if terminated_at else ""
    state = state or "active"
    metadata = metadata if metadata is not None else {}  # avoid shared mutable default

    # timestamp may be passed as a string; the return block needs a datetime.
    if not isinstance(timestamp, datetime):
        timestamp = dateutil.parser.parse(timestamp)

    return {
        "event_type": event_type,
        "payload": {
            "state_description": "",
            "availability_zone": None,
            "terminated_at": terminated_at,
            "ephemeral_gb": 0,
            "instance_type_id": 12,
            "message": "Success",
            "deleted_at": deleted_at,
            "memory_mb": 1024,
            "user_id": "2525317304464dc3a03f2a63e99200c8",
            "reservation_id": "r-7e68nhfk",
            "hostname": hostname,
            "state": state,
            "launched_at": launched_at,
            "node": "mynode.domain.tld",
            "ramdisk_id": "",
            "access_ip_v6": None,
            "disk_gb": 50,
            "access_ip_v4": None,
            "kernel_id": "",
            "image_name": "CentOS 6.4 x86_64",
            "host": "node02",
            "display_name": display_name,
            "root_gb": 50,
            "tenant_id": tenant_id,
            "created_at": created_at,
            "instance_id": instance_id,
            "instance_type": instance_type,
            "vcpus": 1,
            "image_meta": {
                "min_disk": "50",
                "container_format": "bare",
                "min_ram": "256",
                "disk_format": "qcow2",
                "build_version": "68",
                "version": os_version,
                "architecture": "x86_64",
                "auto_disk_config": "True",
                "os_type": os_type,
                "base_image_ref": "ea0d5e26-a272-462a-9333-1e38813bac7b",
                "distro": os_distro
            },
            "architecture": "x86_64",
            "os_type": "linux",
            "instance_flavor_id": instance_flavor_id,
            "metadata": metadata
        },
        "timestamp": timestamp.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
        "updated_at": _format_date(timestamp - timedelta(seconds=10)),
    }
|
||||
|
||||
|
||||
def _get_volume_icehouse_payload(event_type, volume_id=None, tenant_id=None, display_name=None, volume_type=None,
                                 volume_size=None, timestamp=None, created_at=None, launched_at=None, status=None,
                                 attached_to=None):
    """Build a raw cinder (icehouse-era) volume notification of the given event type.

    Datetime arguments for created_at/launched_at are formatted with
    _format_date; unspecified fields fall back to fixed sample values.
    NOTE(review): the top-level "timestamp" field is set to launched_at,
    not to the `timestamp` argument — `timestamp` only feeds "updated_at".
    Presumably intentional for these test fixtures; confirm against callers.
    """
    volume_id = volume_id or "64a0ca7f-5f5a-4dc5-a1e1-e04e89eb95ed"
    tenant_id = tenant_id or "46eeb8e44298460899cf4b3554bfe11f"
    display_name = display_name or "mytenant-0001-myvolume"
    volume_type = volume_type or DEFAULT_VOLUME_TYPE
    volume_size = volume_size or 50
    timestamp = timestamp if timestamp else "2014-02-14T17:18:40.888401Z"
    created_at = _format_date(created_at) if created_at else "2014-02-14T17:18:35.000000Z"
    launched_at = _format_date(launched_at) if launched_at else "2014-02-14T17:18:40.765844Z"
    status = status or "available"
    attached_to = attached_to or "e7d44dea-21c1-452c-b50c-cbab0d07d7d3"

    # timestamp may be a string; "updated_at" below needs datetime arithmetic.
    if not isinstance(timestamp, datetime):
        timestamp = dateutil.parser.parse(timestamp)

    return {
        "event_type": event_type,
        "timestamp": launched_at,
        "publisher_id": "volume.cinder01",
        "payload": {
            "instance_uuid": attached_to,
            "status": status,
            "display_name": display_name,
            "availability_zone": "nova",
            "tenant_id": tenant_id,
            "created_at": created_at,
            "snapshot_id": None,
            "volume_type": volume_type,
            "volume_id": volume_id,
            "user_id": "ebc0d5a5ecf3417ca0d4f8c90d682f6e",
            "launched_at": launched_at,
            "size": volume_size,
        },
        "priority": "INFO",
        "updated_at": _format_date(timestamp - timedelta(seconds=10)),

    }
|
||||
|
||||
|
||||
def _get_volume_kilo_payload(event_type, volume_id=None, tenant_id=None, display_name=None, volume_type=None,
                             timestamp=None, attached_to=None, volume_size=1):
    """Build a raw cinder (kilo-era) volume notification of the given event type.

    `attached_to` is an iterable of instance ids; one volume_attachment
    entry is emitted per id. Unspecified fields fall back to fixed sample
    values.
    Fix: iterating `attached_to` raised TypeError when it was None (its
    default, and the default of both kilo sample builders); None is now
    treated as "no attachments".
    """
    volume_id = volume_id or "64a0ca7f-5f5a-4dc5-a1e1-e04e89eb95ed"
    tenant_id = tenant_id or "46eeb8e44298460899cf4b3554bfe11f"
    display_name = display_name or "mytenant-0001-myvolume"
    volume_type = volume_type or DEFAULT_VOLUME_TYPE
    timestamp = timestamp if timestamp else "2014-02-14T17:18:40.888401Z"
    volume_attachment = []

    # timestamp may be a string; the fields below need datetime arithmetic.
    if not isinstance(timestamp, datetime):
        timestamp = dateutil.parser.parse(timestamp)

    for instance_id in (attached_to or []):
        volume_attachment.append({
            "instance_uuid": instance_id,
            "attach_time": _format_date(timestamp - timedelta(seconds=10)),
            "deleted": False,
            "attach_mode": "ro",
            "created_at": _format_date(timestamp - timedelta(seconds=10)),
            "attached_host": "",
            "updated_at": _format_date(timestamp - timedelta(seconds=10)),
            "attach_status": 'available',
            "detach_time": "",
            "volume_id": volume_id,
            "mountpoint": "/dev/vdd",
            "deleted_at": "",
            "id": "228345ee-0520-4d45-86fa-1e4c9f8d057d"
        })

    return {
        "event_type": event_type,
        "timestamp": _format_date(timestamp),
        "publisher_id": "volume.cinder01",
        "payload": {
            "status": "in-use",
            "display_name": display_name,
            "volume_attachment": volume_attachment,
            "availability_zone": "nova",
            "tenant_id": tenant_id,
            "created_at": "2015-07-27T16:11:07Z",
            "volume_id": volume_id,
            "volume_type": volume_type,
            "host": "web@lvmdriver-1#lvmdriver-1",
            "replication_status": "disabled",
            "user_id": "aa518ac79d4c4d61b806e64600fcad21",
            "metadata": [],
            "launched_at": "2015-07-27T16:11:08Z",
            "size": volume_size
        },
        "priority": "INFO",
        "updated_at": _format_date(timestamp - timedelta(seconds=10)),
    }
|
||||
|
||||
|
||||
def get_instance_rebuild_end_sample():
    """Build a compute.instance.rebuild.end sample using only default values."""
    event_type = "compute.instance.rebuild.end"
    return _get_instance_payload(event_type)
|
||||
|
||||
|
||||
def get_instance_resized_end_sample():
    """Build a compute.instance.resize.confirm.end sample using only default values."""
    event_type = "compute.instance.resize.confirm.end"
    return _get_instance_payload(event_type)
|
||||
|
||||
|
||||
def get_volume_update_end_sample(volume_id=None, tenant_id=None, volume_type=None, volume_size=None,
                                 creation_timestamp=None, deletion_timestamp=None, name=None):
    """Build a sample volume.resize.end notification (icehouse format)."""
    update_time = deletion_timestamp if deletion_timestamp else datetime(2014, 2, 23, 8, 1, 58, tzinfo=pytz.utc)
    return _get_volume_icehouse_payload(
        "volume.resize.end",
        volume_id=volume_id or "64a0ca7f-5f5a-4dc5-a1e1-e04e89eb95ed",
        tenant_id=tenant_id or "46eeb8e44298460899cf4b3554bfe11f",
        display_name=name or "mytenant-0001-myvolume",
        volume_type=volume_type or DEFAULT_VOLUME_TYPE,
        volume_size=volume_size or 50,
        created_at=creation_timestamp if creation_timestamp else datetime(2014, 2, 14, 17, 18, 35, tzinfo=pytz.utc),
        # launched_at and timestamp both reflect the moment of the update.
        launched_at=update_time,
        timestamp=update_time,
        status="deleting",
    )
|
||||
|
||||
|
||||
def get_volume_type_create_sample(volume_type_id, volume_type_name):
    """Build a sample volume_type.create notification for the given type id/name."""
    volume_type_fields = {
        "name": volume_type_name,
        "qos_specs_id": None,
        "deleted": False,
        "created_at": "2014-02-14T17:18:35.036186Z",
        "extra_specs": {},
        "deleted_at": None,
        "id": volume_type_id,
    }
    return {
        "event_type": "volume_type.create",
        "publisher_id": "volume.cinder01",
        "payload": {
            "volume_types": volume_type_fields
        },
        "updated_at": "2014-02-14T17:18:35.036186Z",
    }
|
0
integration_tests/helpers/__init__.py
Normal file
0
integration_tests/helpers/__init__.py
Normal file
68
integration_tests/helpers/almanach_helper.py
Normal file
68
integration_tests/helpers/almanach_helper.py
Normal file
@ -0,0 +1,68 @@
|
||||
# Copyright 2016 Internap.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import requests
|
||||
|
||||
|
||||
class AlmanachHelper(object):
    """Thin HTTP client for the Almanach REST API used by the integration tests."""

    x_auth_token = 'secret'

    def __init__(self):
        # Inside the docker-compose network the API is reachable as api:8000;
        # when running locally it is published on 127.0.0.1:80.
        in_container = bool(os.environ.get('TEST_CONTAINER'))
        host = "api" if in_container else "127.0.0.1"
        self.base_url = "http://{url}:{port}".format(url=host, port=8000 if in_container else 80)

    def get_entities(self, tenant_id, start):
        """Return the JSON list of entities of a project starting at *start*."""
        url = "{url}/project/{project}/entities?start={start}".format(
            url=self.base_url, project=tenant_id, start=start
        )
        return requests.get(url, headers=self._get_query_headers()).json()

    def get(self, url, headers=None, **params):
        """GET a URL template; default auth headers unless overridden."""
        request_headers = headers if headers else self._get_query_headers()
        return requests.get(url.format(url=self.base_url, **params), headers=request_headers)

    def post(self, url, data, **params):
        """POST *data* as JSON to a URL template."""
        return self._send(requests.post, url, data, params)

    def put(self, url, data, **params):
        """PUT *data* as JSON to a URL template."""
        return self._send(requests.put, url, data, params)

    def delete(self, url, data, **params):
        """DELETE with *data* as JSON body on a URL template."""
        return self._send(requests.delete, url, data, params)

    def _send(self, method, url, data, params):
        # All mutating verbs share the same shape: JSON body + auth headers.
        return method(
            url.format(url=self.base_url, **params),
            data=json.dumps(data),
            headers=self._get_query_headers()
        )

    def _get_query_headers(self):
        return {
            'X-Auth-Token': self.x_auth_token,
            'Accept': 'application/json'
        }
|
27
integration_tests/helpers/mongo_helper.py
Normal file
27
integration_tests/helpers/mongo_helper.py
Normal file
@ -0,0 +1,27 @@
|
||||
# Copyright 2016 Internap.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
from pymongo import MongoClient
|
||||
|
||||
|
||||
class MongoHelper(object):
    """Direct MongoDB access for cleaning up integration-test data."""

    def __init__(self):
        # "database" is the docker-compose service name; fall back to
        # localhost when running outside the test container.
        mongo_host = "database" if os.environ.get('TEST_CONTAINER') else "127.0.0.1"
        self.mongo = MongoClient(host=mongo_host)

    def drop_database(self):
        """Remove all test data by dropping the whole 'almanach' database."""
        self.mongo.drop_database('almanach')
|
34
integration_tests/helpers/rabbit_mq_helper.py
Normal file
34
integration_tests/helpers/rabbit_mq_helper.py
Normal file
@ -0,0 +1,34 @@
|
||||
# Copyright 2016 Internap.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
from kombu import BrokerConnection
|
||||
from kombu import Exchange
|
||||
from kombu.pools import producers
|
||||
from kombu.common import maybe_declare
|
||||
|
||||
|
||||
class RabbitMqHelper(object):
    """Publishes test notification messages on the almanach.info topic exchange."""

    def __init__(self):
        # "messaging" is the docker-compose service name; fall back to
        # localhost when running outside the test container.
        hostname = "messaging" if os.environ.get('TEST_CONTAINER') else "127.0.0.1"
        self.task_exchange = Exchange("almanach.info", type="topic")
        self.connection = BrokerConnection("amqp://guest:guest@{url}:{port}".format(url=hostname, port=5672))

    def push(self, message):
        """Publish *message* to the exchange, declaring it first if needed."""
        with producers[self.connection].acquire(block=True) as producer:
            maybe_declare(self.task_exchange, producer.channel)
            producer.publish(message, routing_key="almanach.info")
|
352
integration_tests/test_api_almanach.py
Normal file
352
integration_tests/test_api_almanach.py
Normal file
@ -0,0 +1,352 @@
|
||||
# Copyright 2016 Internap.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from hamcrest import assert_that, has_item, equal_to, has_entry
|
||||
from base_api_testcase import BaseApiTestCase
|
||||
|
||||
|
||||
class ApiAlmanachTest(BaseApiTestCase):
|
||||
|
||||
def test_the_info_page(self):
|
||||
response = self.almanachHelper.get(url="{url}/info")
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_list_entities_unauthorized(self):
|
||||
list_query = "{url}/project/{project}/instances?start={start}&end={end}"
|
||||
response = self.almanachHelper.get(url=list_query, headers={'Accept': 'application/json'},
|
||||
project="e455d65807cb4796bd72abecdc8a76ba",
|
||||
start="2014-02-28 18:50:00.000", end="2014-03-21 22:00:00.000")
|
||||
assert_that(response.status_code, equal_to(401))
|
||||
|
||||
def test_instance_create_missing_type_name_param(self):
|
||||
volume_type_query = "{url}/volume_type"
|
||||
volume_type_id = str(uuid4())
|
||||
data = dict(
|
||||
type_id=volume_type_id
|
||||
)
|
||||
|
||||
response = self.almanachHelper.post(url=volume_type_query, data=data)
|
||||
|
||||
assert_that(response.status_code, equal_to(400))
|
||||
assert_that(response.json(), has_entry(
|
||||
"error",
|
||||
"The 'type_name' param is mandatory for the request you have made."
|
||||
))
|
||||
|
||||
def test_instance_create(self):
|
||||
instance_create_query = "{url}/project/{project}/instance"
|
||||
project_id = "my_test_project_id"
|
||||
instance_id = str(uuid4())
|
||||
data = {'id': instance_id,
|
||||
'created_at': '2016-01-01T18:30:00Z',
|
||||
'name': 'integration_test_instance_FlavorA',
|
||||
'flavor': 'FlavorA',
|
||||
'os_type': 'Linux',
|
||||
'os_distro': 'Ubuntu',
|
||||
'os_version': '14.04'}
|
||||
|
||||
response = self.almanachHelper.post(url=instance_create_query, data=data, project=project_id)
|
||||
assert_that(response.status_code, equal_to(201))
|
||||
|
||||
list_query = "{url}/project/{project}/instances?start={start}&end={end}"
|
||||
response = self.almanachHelper.get(url=list_query, project=project_id,
|
||||
start="2016-01-01 18:29:00.000", end="2016-01-01 18:31:00.000")
|
||||
|
||||
assert_that(response.status_code, equal_to(200))
|
||||
assert_that(response.json(), has_item({'entity_id': instance_id,
|
||||
'end': None,
|
||||
'entity_type': 'instance',
|
||||
'flavor': data['flavor'],
|
||||
'last_event': '2016-01-01 18:30:00+00:00',
|
||||
'name': data['name'],
|
||||
'os': {
|
||||
'distro': data['os_distro'],
|
||||
'os_type': data['os_type'],
|
||||
'version': data['os_version']
|
||||
},
|
||||
'project_id': project_id,
|
||||
'start': '2016-01-01 18:30:00+00:00',
|
||||
'metadata': {}}))
|
||||
|
||||
    def test_instance_create_bad_date_format(self):
        """Creating an instance with an unparseable 'created_at' yields 400 and a format hint."""
        instance_create_query = "{url}/project/{project}/instance"
        project_id = "my_test_project_id"
        instance_id = str(uuid4())
        data = {'id': instance_id,
                'created_at': 'A_BAD_DATE_FORMAT',
                'name': 'integration_test_instance_FlavorA',
                'flavor': 'FlavorA',
                'os_type': 'Linux',
                'os_distro': 'Ubuntu',
                'os_version': '14.04'}

        response = self.almanachHelper.post(url=instance_create_query, data=data, project=project_id)

        assert_that(response.status_code, equal_to(400))
        assert_that(response.json(), has_entry(
            'error',
            'The provided date has an invalid format. Format should be of yyyy-mm-ddThh:mm:ss.msZ, '
            'ex: 2015-01-31T18:24:34.1523Z'
        ))
||||
    def test_instance_create_missing_flavor_param(self):
        """Creating an instance without the mandatory 'flavor' param yields a 400 error."""
        instance_create_query = "{url}/project/{project}/instance"
        project_id = "my_test_project_id"
        instance_id = str(uuid4())
        # Payload deliberately omits the 'flavor' key.
        data = {'id': instance_id,
                'created_at': '2016-01-01T18:30:00Z',
                'name': 'integration_test_instance_FlavorA',
                'os_type': 'Linux',
                'os_distro': 'Ubuntu',
                'os_version': '14.04'}

        response = self.almanachHelper.post(url=instance_create_query, data=data, project=project_id)

        assert_that(response.status_code, equal_to(400))
        assert_that(response.json(), has_entry(
            "error",
            "The 'flavor' param is mandatory for the request you have made."
        ))
||||
    def test_instance_delete(self):
        """Deleting an instance closes its usage period: 'end' and 'last_event' get the delete date."""
        instance_create_query = "{url}/project/{project}/instance"
        project_id = "my_test_project_id"
        instance_id = str(uuid4())
        create_data = {'id': instance_id,
                       'created_at': '2016-01-01T18:30:00Z',
                       'name': 'integration_test_instance_FlavorA',
                       'flavor': 'FlavorA',
                       'os_type': 'Linux',
                       'os_distro': 'Ubuntu',
                       'os_version': '14.04'}

        response = self.almanachHelper.post(url=instance_create_query, data=create_data, project=project_id)
        assert_that(response.status_code, equal_to(201))

        # Delete the instance 20 minutes after creation.
        instance_delete_query = "{url}/instance/{instance_id}"
        delete_data = {'date': '2016-01-01T18:50:00Z'}
        response = self.almanachHelper.delete(url=instance_delete_query, data=delete_data, instance_id=instance_id)
        assert_that(response.status_code, equal_to(202))

        # Query a one-minute window around the deletion date.
        list_query = "{url}/project/{project}/instances?start={start}&end={end}"
        response = self.almanachHelper.get(url=list_query,
                                           project=project_id,
                                           start="2016-01-01 18:49:00.000",
                                           end="2016-01-01 18:51:00.000")

        assert_that(response.status_code, equal_to(200))
        assert_that(response.json(), has_item({'entity_id': instance_id,
                                               'end': '2016-01-01 18:50:00+00:00',
                                               'entity_type': 'instance',
                                               'flavor': create_data['flavor'],
                                               'last_event': '2016-01-01 18:50:00+00:00',
                                               'name': create_data['name'],
                                               'os': {
                                                   'distro': create_data['os_distro'],
                                                   'os_type': create_data['os_type'],
                                                   'version': create_data['os_version']
                                               },
                                               'project_id': project_id,
                                               'start': '2016-01-01 18:30:00+00:00',
                                               'metadata': {}}))
||||
    def test_instance_delete_bad_date_format(self):
        """Deleting an existing instance with an unparseable date yields 400 and a format hint."""
        instance_create_query = "{url}/project/{project}/instance"
        # Random project id keeps this test isolated from the shared test project.
        project_id = str(uuid4())
        instance_id = str(uuid4())
        data = {'id': instance_id,
                'created_at': '2016-01-01T18:30:00Z',
                'name': 'integration_test_instance_FlavorA',
                'flavor': 'FlavorA',
                'os_type': 'Linux',
                'os_distro': 'Ubuntu',
                'os_version': '14.04'}

        response = self.almanachHelper.post(url=instance_create_query, data=data, project=project_id)
        assert_that(response.status_code, equal_to(201))

        instance_delete_query = "{url}/instance/{instance_id}"
        delete_data = {'date': 'A_BAD_DATE'}

        response = self.almanachHelper.delete(url=instance_delete_query, data=delete_data, instance_id=instance_id)
        assert_that(response.status_code, equal_to(400))
        assert_that(response.json(), has_entry(
            'error',
            'The provided date has an invalid format. Format should be of yyyy-mm-ddThh:mm:ss.msZ, '
            'ex: 2015-01-31T18:24:34.1523Z'
        ))
||||
def test_instance_delete_missing_param(self):
|
||||
instance_delete_query = "{url}/instance/{instance_id}"
|
||||
|
||||
response = self.almanachHelper.delete(url=instance_delete_query, data=dict(), instance_id="my_instance_id")
|
||||
assert_that(response.status_code, equal_to(400))
|
||||
assert_that(response.json(), has_entry(
|
||||
"error",
|
||||
"The 'date' param is mandatory for the request you have made."
|
||||
))
|
||||
|
||||
    def test_instance_resize(self):
        """Resizing an instance closes the old flavor period and opens a new one at the resize date."""
        instance_create_query = "{url}/project/{project}/instance"
        project_id = "my_test_project_id"
        instance_id = str(uuid4())
        create_data = {'id': instance_id,
                       'created_at': '2016-01-01T18:30:00Z',
                       'name': 'integration_test_instance_FlavorA',
                       'flavor': 'FlavorA',
                       'os_type': 'Linux',
                       'os_distro': 'Ubuntu',
                       'os_version': '14.04'}

        response = self.almanachHelper.post(url=instance_create_query, data=create_data, project=project_id)
        assert_that(response.status_code, equal_to(201))

        # Resize to a different flavor 10 minutes after creation.
        instance_resize_query = "{url}/instance/{instance_id}/resize"
        resize_data = {'date': '2016-01-01T18:40:00Z',
                       'flavor': 'FlavorC'}

        response = self.almanachHelper.put(url=instance_resize_query, data=resize_data, instance_id=instance_id)
        assert_that(response.status_code, equal_to(200))

        # Query a one-minute window around the resize date.
        list_query = "{url}/project/{project}/instances?start={start}&end={end}"
        response = self.almanachHelper.get(url=list_query,
                                           project=project_id,
                                           start="2016-01-01 18:39:00.000",
                                           end="2016-01-01 18:41:00.000")

        assert_that(response.status_code, equal_to(200))
        # The listed entity carries the new flavor and starts at the resize date.
        assert_that(response.json(), has_item({'entity_id': instance_id,
                                               'end': None,
                                               'entity_type': 'instance',
                                               'flavor': resize_data['flavor'],
                                               'last_event': '2016-01-01 18:40:00+00:00',
                                               'name': create_data['name'],
                                               'os': {
                                                   'distro': create_data['os_distro'],
                                                   'os_type': create_data['os_type'],
                                                   'version': create_data['os_version']
                                               },
                                               'project_id': project_id,
                                               'start': '2016-01-01 18:40:00+00:00',
                                               'metadata': {}}))
||||
def test_instance_resize_bad_date_format(self):
|
||||
instance_resize_query = "{url}/instance/{instance_id}/resize"
|
||||
resize_data = {'date': 'A_BAD_DATE',
|
||||
'flavor': 'FlavorC'}
|
||||
|
||||
response = self.almanachHelper.put(url=instance_resize_query, data=resize_data, instance_id="my_instance_id")
|
||||
assert_that(response.status_code, equal_to(400))
|
||||
assert_that(response.json(), has_entry(
|
||||
'error',
|
||||
'The provided date has an invalid format. Format should be of yyyy-mm-ddThh:mm:ss.msZ, '
|
||||
'ex: 2015-01-31T18:24:34.1523Z'
|
||||
))
|
||||
|
||||
    def test_instance_resize_missing_param(self):
        """Resizing without the mandatory 'date' param yields a 400 error."""
        instance_resize_query = "{url}/instance/{instance_id}/resize"
        # Payload deliberately omits the 'date' key.
        resize_data = {'flavor': 'FlavorC'}

        response = self.almanachHelper.put(url=instance_resize_query, data=resize_data, instance_id="my_instance_id")
        assert_that(response.status_code, equal_to(400))
        assert_that(response.json(), has_entry(
            "error",
            "The 'date' param is mandatory for the request you have made."
        ))
||||
    def test_instance_rebuild(self):
        """Rebuilding an instance updates its OS version and restarts the period at the rebuild date."""
        instance_create_query = "{url}/project/{project}/instance"
        project_id = "my_test_project_id"
        instance_id = str(uuid4())
        create_data = {'id': instance_id,
                       'created_at': '2016-01-01T18:30:00Z',
                       'name': 'integration_test_instance_FlavorA',
                       'flavor': 'FlavorA',
                       'os_type': 'Linux',
                       'os_distro': 'Ubuntu',
                       'os_version': '12.04'}

        response = self.almanachHelper.post(url=instance_create_query, data=create_data, project=project_id)

        assert_that(response.status_code, equal_to(201))

        # Rebuild 10 minutes later onto a newer OS version (12.04 -> 14.04).
        update_instance_rebuild_query = "{url}/instance/{instance_id}/rebuild"
        rebuild_data = {
            'distro': 'Ubuntu',
            'version': '14.04',
            'os_type': 'Linux',
            'rebuild_date': '2016-01-01T18:40:00Z'
        }

        response = self.almanachHelper.put(url=update_instance_rebuild_query, data=rebuild_data,
                                           instance_id=instance_id)

        assert_that(response.status_code, equal_to(200))

        # Query a one-minute window around the rebuild date.
        list_query = "{url}/project/{project}/instances?start={start}&end={end}"
        response = self.almanachHelper.get(url=list_query, project=project_id,
                                           start="2016-01-01 18:39:00.000", end="2016-01-01 18:41:00.000")

        assert_that(response.status_code, equal_to(200))
        # Version comes from the rebuild; flavor and name are unchanged.
        assert_that(response.json(), has_item({'entity_id': instance_id,
                                               'end': None,
                                               'entity_type': 'instance',
                                               'flavor': create_data['flavor'],
                                               'last_event': '2016-01-01 18:40:00+00:00',
                                               'name': create_data['name'],
                                               'os': {
                                                   'distro': create_data['os_distro'],
                                                   'os_type': create_data['os_type'],
                                                   'version': rebuild_data['version']
                                               },
                                               'project_id': project_id,
                                               'start': '2016-01-01 18:40:00+00:00',
                                               'metadata': {}}))
||||
    def test_instance_rebuild_bad_date_format(self):
        """Rebuilding with an unparseable 'rebuild_date' yields 400 and a format hint."""
        update_instance_rebuild_query = "{url}/instance/{instance_id}/rebuild"
        instance_id = str(uuid4())
        rebuild_data = {
            'distro': 'Ubuntu',
            'version': '14.04',
            'os_type': 'Linux',
            'rebuild_date': 'A_BAD_DATE'
        }
        response = self.almanachHelper.put(url=update_instance_rebuild_query, data=rebuild_data,
                                           instance_id=instance_id)
        assert_that(response.status_code, equal_to(400))
        assert_that(response.json(), has_entry(
            'error',
            'The provided date has an invalid format. Format should be of yyyy-mm-ddThh:mm:ss.msZ, '
            'ex: 2015-01-31T18:24:34.1523Z'
        ))
||||
def test_instance_rebuild_missing_param(self):
|
||||
update_instance_rebuild_query = "{url}/instance/{instance_id}/rebuild"
|
||||
instance_id = str(uuid4())
|
||||
rebuild_data = {
|
||||
'distro': 'Ubuntu',
|
||||
'os_type': 'Linux',
|
||||
'rebuild_date': 'A_BAD_DATE'
|
||||
}
|
||||
|
||||
response = self.almanachHelper.put(url=update_instance_rebuild_query, data=rebuild_data,
|
||||
instance_id=instance_id)
|
||||
assert_that(response.status_code, equal_to(400))
|
||||
assert_that(response.json(), has_entry(
|
||||
"error",
|
||||
"The 'version' param is mandatory for the request you have made."
|
||||
))
|
122
integration_tests/test_api_instance_entity.py
Normal file
122
integration_tests/test_api_instance_entity.py
Normal file
@ -0,0 +1,122 @@
|
||||
# Copyright 2016 Internap.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from uuid import uuid4
|
||||
from hamcrest import assert_that, has_entry, equal_to
|
||||
|
||||
from base_api_testcase import BaseApiTestCase
|
||||
|
||||
|
||||
class ApiInstanceEntityTest(BaseApiTestCase):
    """Integration tests for the PUT /entity/instance/{id} update endpoint."""

    def test_update_entity_instance_with_multiple_attributes(self):
        """Updating start_date and flavor together succeeds and both fields are returned."""
        instance_id = self._create_instance_entity()

        response = self.almanachHelper.put(url="{url}/entity/instance/{instance_id}",
                                           data={"start_date": "2016-04-14T18:30:00.00Z", "flavor": "FlavorB"},
                                           instance_id=instance_id,
                                           )

        assert_that(response.status_code, equal_to(200))
        assert_that(response.json(), has_entry("entity_id", instance_id))
        assert_that(response.json(), has_entry("start", "2016-04-14 18:30:00+00:00"))
        assert_that(response.json(), has_entry("flavor", "FlavorB"))

    def test_update_entity_instance_with_multiple_wrong_attributes(self):
        """Type errors on several attributes are all reported in one 400 response."""
        instance_id = self._create_instance_entity()

        # 'flavor' and 'os' have the wrong types on purpose.
        response = self.almanachHelper.put(url="{url}/entity/instance/{instance_id}",
                                           data={"start_date": "2016-04-14T18:30:00.00Z", "flavor": 123, "os": 123},
                                           instance_id=instance_id,
                                           )

        assert_that(response.status_code, equal_to(400))
        assert_that(response.json(), equal_to({"error": {"flavor": "expected unicode", "os": "expected a dictionary"}}))

    def test_update_entity_instance_with_one_attribute(self):
        """Updating only start_date succeeds and the new value is returned."""
        instance_id = self._create_instance_entity()

        response = self.almanachHelper.put(url="{url}/entity/instance/{instance_id}",
                                           data={"start_date": "2016-04-14T18:30:00.00Z"},
                                           instance_id=instance_id,
                                           )

        assert_that(response.status_code, equal_to(200))
        assert_that(response.json(), has_entry("entity_id", instance_id))
        assert_that(response.json(), has_entry("start", "2016-04-14 18:30:00+00:00"))

    def test_update_entity_instance_with_invalid_attribute(self):
        """An unknown attribute key in the update payload is rejected with 400."""
        project_id = "my_test_project_id"
        instance_id = self._create_instance_entity()
        data = {
            'id': instance_id,
            'created_at': '2016-01-01T18:30:00Z',
            'name': 'integration_test_instance_FlavorA',
            'flavor': 'FlavorA',
            'os_type': 'FreeBSD',
            'os_distro': 'Stable',
            'os_version': '10',
        }

        response = self.almanachHelper.post(url="{url}/project/{project}/instance", data=data,
                                            project=project_id)
        assert_that(response.status_code, equal_to(201))

        # 'flavor_flavor' is not a valid entity attribute.
        response = self.almanachHelper.put(url="{url}/entity/instance/{instance_id}",
                                           data={'flavor_flavor': 'FlavorA'},
                                           instance_id=instance_id,
                                           )

        assert_that(response.status_code, equal_to(400))
        assert_that(response.json(), equal_to({"error": {"flavor_flavor": "extra keys not allowed"}}))

    def test_update_entity_instance_with_wrong_date_format(self):
        """An unparseable start_date is rejected with 400 and names the expected format."""
        instance_id = self._create_instance_entity()

        response = self.almanachHelper.put(url="{url}/entity/instance/{instance_id}",
                                           data={"start_date": "my date"},
                                           instance_id=instance_id,
                                           )

        assert_that(response.status_code, equal_to(400))
        assert_that(response.json(), equal_to(
            {"error": {"start_date": "value does not match expected format %Y-%m-%dT%H:%M:%S.%fZ"}}
        ))

    def test_update_entity_change_flavor_of_closed(self):
        """A closed entity can be updated when targeted by its exact start/end period."""
        instance_create_query = "{url}/project/{project}/instance"
        project_id = "my_test_project_id"
        instance_id = str(uuid4())
        data = {'id': instance_id,
                'created_at': '2016-01-01T18:30:00Z',
                'name': 'integration_test_instance_FlavorA',
                'flavor': 'FlavorA',
                'os_type': 'Linux',
                'os_distro': 'Ubuntu',
                'os_version': '14.04'}

        # Create then close the instance so its usage period has an end date.
        self.almanachHelper.post(url=instance_create_query, data=data, project=project_id)
        instance_delete_query = "{url}/instance/{instance_id}"
        delete_data = {'date': '2016-01-01T18:50:00Z'}
        self.almanachHelper.delete(url=instance_delete_query, data=delete_data, instance_id=instance_id)

        # The start/end query params select which closed period to update.
        response = self.almanachHelper.put(url="{url}/entity/instance/{instance_id}?start={start}&end={end}",
                                           start="2016-01-01 18:29:59.0",
                                           end="2016-01-01 18:50:00.0",
                                           data={"flavor": "FlavorB",
                                                 "end_date": "2016-01-02 18:50:00.0Z"},
                                           instance_id=instance_id,
                                           )
        assert_that(response.status_code, equal_to(200))
        assert_that(response.json(), has_entry("flavor", "FlavorB"))
        assert_that(response.json(), has_entry("end", "2016-01-02 18:50:00+00:00"))
|
411
integration_tests/test_api_volume.py
Normal file
411
integration_tests/test_api_volume.py
Normal file
@ -0,0 +1,411 @@
|
||||
# Copyright 2016 Internap.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from uuid import uuid4
|
||||
from hamcrest import equal_to, assert_that, has_entry, has_item
|
||||
from retry import retry
|
||||
|
||||
from builders import messages
|
||||
from base_api_testcase import BaseApiTestCase
|
||||
from helpers.mongo_helper import MongoHelper
|
||||
|
||||
|
||||
class ApiVolumeTest(BaseApiTestCase):
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
cls.setup_volume_type()
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
MongoHelper().drop_database()
|
||||
|
||||
@classmethod
|
||||
def setup_volume_type(cls):
|
||||
cls.rabbitMqHelper.push(
|
||||
message=messages.get_volume_type_create_sample(volume_type_id=messages.DEFAULT_VOLUME_TYPE,
|
||||
volume_type_name=messages.DEFAULT_VOLUME_TYPE),
|
||||
)
|
||||
cls._wait_until_volume_type_is_created()
|
||||
|
||||
def test_volume_create(self):
|
||||
volume_create_query = "{url}/project/{project}/volume"
|
||||
project_id = "my_test_project_id"
|
||||
volume_id = str(uuid4())
|
||||
data = {'volume_id': volume_id,
|
||||
'attached_to': [],
|
||||
'volume_name': messages.DEFAULT_VOLUME_NAME,
|
||||
'volume_type': messages.DEFAULT_VOLUME_TYPE,
|
||||
'start': '2016-01-01T18:30:00Z',
|
||||
'size': 100}
|
||||
|
||||
response = self.almanachHelper.post(url=volume_create_query, data=data, project=project_id)
|
||||
assert_that(response.status_code, equal_to(201))
|
||||
|
||||
list_query = "{url}/project/{project}/volumes?start={start}&end={end}"
|
||||
response = self.almanachHelper.get(url=list_query,
|
||||
project=project_id,
|
||||
start="2016-01-01 18:29:00.000", end="2016-01-01 18:31:00.000")
|
||||
|
||||
assert_that(response.status_code, equal_to(200))
|
||||
assert_that(response.json(), has_item({'entity_id': volume_id,
|
||||
'attached_to': data['attached_to'],
|
||||
'end': None,
|
||||
'name': data['volume_name'],
|
||||
'entity_type': 'volume',
|
||||
'last_event': '2016-01-01 18:30:00+00:00',
|
||||
'volume_type': messages.DEFAULT_VOLUME_TYPE,
|
||||
'start': '2016-01-01 18:30:00+00:00',
|
||||
'project_id': project_id,
|
||||
'size': data['size']}))
|
||||
|
||||
def test_volume_create_bad_date_format(self):
|
||||
volume_create_query = "{url}/project/{project}/volume"
|
||||
project_id = "my_test_project_id"
|
||||
volume_id = str(uuid4())
|
||||
data = {'volume_id': volume_id,
|
||||
'attached_to': [],
|
||||
'volume_name': messages.DEFAULT_VOLUME_NAME,
|
||||
'volume_type': messages.DEFAULT_VOLUME_TYPE,
|
||||
'start': 'BAD_DATE_FORMAT',
|
||||
'size': 100}
|
||||
|
||||
response = self.almanachHelper.post(url=volume_create_query, data=data, project=project_id)
|
||||
|
||||
assert_that(response.status_code, equal_to(400))
|
||||
assert_that(response.json(), has_entry(
|
||||
'error',
|
||||
'The provided date has an invalid format. Format should be of yyyy-mm-ddThh:mm:ss.msZ, '
|
||||
'ex: 2015-01-31T18:24:34.1523Z'
|
||||
))
|
||||
|
||||
def test_volume_create_missing_param(self):
|
||||
volume_create_query = "{url}/project/{project}/volume"
|
||||
project_id = "my_test_project_id"
|
||||
volume_id = str(uuid4())
|
||||
data = {'volume_id': volume_id,
|
||||
'attached_to': [],
|
||||
'volume_name': messages.DEFAULT_VOLUME_NAME,
|
||||
'volume_type': messages.DEFAULT_VOLUME_TYPE,
|
||||
'size': 100}
|
||||
|
||||
response = self.almanachHelper.post(url=volume_create_query, data=data, project=project_id)
|
||||
|
||||
assert_that(response.status_code, equal_to(400))
|
||||
assert_that(response.json(), has_entry(
|
||||
"error",
|
||||
"The 'start' param is mandatory for the request you have made."
|
||||
))
|
||||
|
||||
def test_volume_delete(self):
|
||||
volume_create_query = "{url}/project/{project}/volume"
|
||||
project_id = "my_test_project_id"
|
||||
volume_id = str(uuid4())
|
||||
create_data = {'volume_id': volume_id,
|
||||
'attached_to': [],
|
||||
'volume_name': messages.DEFAULT_VOLUME_NAME,
|
||||
'volume_type': messages.DEFAULT_VOLUME_TYPE,
|
||||
'start': '2016-01-01T18:30:00Z',
|
||||
'size': 100}
|
||||
|
||||
response = self.almanachHelper.post(url=volume_create_query, data=create_data, project=project_id)
|
||||
assert_that(response.status_code, equal_to(201))
|
||||
|
||||
volume_delete_query = "{url}/volume/{volume_id}"
|
||||
delete_data = {'date': '2016-01-01T18:50:00Z'}
|
||||
response = self.almanachHelper.delete(url=volume_delete_query, data=delete_data, volume_id=volume_id)
|
||||
assert_that(response.status_code, equal_to(202))
|
||||
|
||||
list_query = "{url}/project/{project}/volumes?start={start}&end={end}"
|
||||
response = self.almanachHelper.get(url=list_query, project=project_id,
|
||||
start="2016-01-01 18:49:00.000", end="2016-01-01 18:51:00.000")
|
||||
|
||||
assert_that(response.status_code, equal_to(200))
|
||||
assert_that(response.json(), has_item({'entity_id': volume_id,
|
||||
'attached_to': create_data['attached_to'],
|
||||
'end': '2016-01-01 18:50:00+00:00',
|
||||
'name': create_data['volume_name'],
|
||||
'entity_type': 'volume',
|
||||
'last_event': '2016-01-01 18:50:00+00:00',
|
||||
'volume_type': messages.DEFAULT_VOLUME_TYPE,
|
||||
'start': '2016-01-01 18:30:00+00:00',
|
||||
'project_id': project_id,
|
||||
'size': create_data['size']}))
|
||||
|
||||
def test_volume_delete_bad_date_format(self):
|
||||
volume_delete_query = "{url}/volume/{volume_id}"
|
||||
delete_data = {'date': 'A_BAD_DATE'}
|
||||
|
||||
response = self.almanachHelper.delete(url=volume_delete_query, data=delete_data, volume_id="my_test_volume_id")
|
||||
assert_that(response.status_code, equal_to(400))
|
||||
assert_that(response.json(), has_entry(
|
||||
'error',
|
||||
'The provided date has an invalid format. Format should be of yyyy-mm-ddThh:mm:ss.msZ, '
|
||||
'ex: 2015-01-31T18:24:34.1523Z'
|
||||
))
|
||||
|
||||
def test_volume_delete_missing_param(self):
|
||||
instance_delete_query = "{url}/volume/{volume_id}"
|
||||
|
||||
response = self.almanachHelper.delete(url=instance_delete_query, data=dict(), volume_id="my_test_volume_id")
|
||||
assert_that(response.status_code, equal_to(400))
|
||||
assert_that(response.json(), has_entry(
|
||||
"error",
|
||||
"The 'date' param is mandatory for the request you have made."
|
||||
))
|
||||
|
||||
def test_volume_resize(self):
|
||||
volume_create_query = "{url}/project/{project}/volume"
|
||||
project_id = "my_test_project_id"
|
||||
volume_id = str(uuid4())
|
||||
create_data = {'volume_id': volume_id,
|
||||
'attached_to': [],
|
||||
'volume_name': messages.DEFAULT_VOLUME_NAME,
|
||||
'volume_type': messages.DEFAULT_VOLUME_TYPE,
|
||||
'start': '2016-01-01T18:30:00Z',
|
||||
'size': 100}
|
||||
|
||||
response = self.almanachHelper.post(url=volume_create_query, data=create_data, project=project_id)
|
||||
assert_that(response.status_code, equal_to(201))
|
||||
|
||||
resize_data = {'date': '2016-01-01T18:40:00Z',
|
||||
'size': '150'}
|
||||
|
||||
volume_resize_query = "{url}/volume/{volume_id}/resize"
|
||||
response = self.almanachHelper.put(url=volume_resize_query, data=resize_data, volume_id=volume_id)
|
||||
assert_that(response.status_code, equal_to(200))
|
||||
|
||||
list_query = "{url}/project/{project}/volumes?start={start}&end={end}"
|
||||
response = self.almanachHelper.get(url=list_query,
|
||||
project=project_id,
|
||||
start="2016-01-01 18:39:00.000",
|
||||
end="2016-01-01 18:41:00.000")
|
||||
|
||||
assert_that(response.status_code, equal_to(200))
|
||||
assert_that(response.json(), has_item({'entity_id': volume_id,
|
||||
'attached_to': create_data['attached_to'],
|
||||
'end': None,
|
||||
'name': create_data['volume_name'],
|
||||
'entity_type': 'volume',
|
||||
'last_event': '2016-01-01 18:40:00+00:00',
|
||||
'volume_type': messages.DEFAULT_VOLUME_TYPE,
|
||||
'start': '2016-01-01 18:40:00+00:00',
|
||||
'project_id': project_id,
|
||||
'size': resize_data['size']}))
|
||||
|
||||
def test_volume_resize_bad_date_format(self):
|
||||
volume_resize_query = "{url}/volume/my_test_volume_id/resize"
|
||||
resize_data = {'date': 'A_BAD_DATE',
|
||||
'size': '150'}
|
||||
|
||||
response = self.almanachHelper.put(url=volume_resize_query, data=resize_data)
|
||||
assert_that(response.status_code, equal_to(400))
|
||||
assert_that(response.json(), has_entry(
|
||||
'error',
|
||||
'The provided date has an invalid format. Format should be of yyyy-mm-ddThh:mm:ss.msZ, '
|
||||
'ex: 2015-01-31T18:24:34.1523Z'
|
||||
))
|
||||
|
||||
def test_volume_resize_missing_param(self):
|
||||
volume_resize_query = "{url}/volume/my_test_volume_id/resize"
|
||||
resize_data = {'size': '250'}
|
||||
|
||||
response = self.almanachHelper.put(url=volume_resize_query, data=resize_data, instance_id="my_instance_id")
|
||||
assert_that(response.status_code, equal_to(400))
|
||||
assert_that(response.json(), has_entry(
|
||||
"error",
|
||||
"The 'date' param is mandatory for the request you have made."
|
||||
))
|
||||
|
||||
def test_volume_attach(self):
|
||||
instance_create_query = "{url}/project/{project}/instance"
|
||||
project_id = "my_test_project_id"
|
||||
instance_id = str(uuid4())
|
||||
instance_data = {'id': instance_id,
|
||||
'created_at': '2016-01-01T18:30:00Z',
|
||||
'name': 'integration_test_instance_FlavorA',
|
||||
'flavor': 'FlavorA',
|
||||
'os_type': 'Linux',
|
||||
'os_distro': 'Ubuntu',
|
||||
'os_version': '14.04'}
|
||||
|
||||
response = self.almanachHelper.post(url=instance_create_query, data=instance_data, project=project_id)
|
||||
assert_that(response.status_code, equal_to(201))
|
||||
|
||||
volume_create_query = "{url}/project/{project}/volume"
|
||||
volume_id = str(uuid4())
|
||||
volume_data = {'volume_id': volume_id,
|
||||
'attached_to': [],
|
||||
'volume_name': messages.DEFAULT_VOLUME_NAME,
|
||||
'volume_type': messages.DEFAULT_VOLUME_TYPE,
|
||||
'start': '2016-01-01T18:30:30Z',
|
||||
'size': 100}
|
||||
|
||||
response = self.almanachHelper.post(url=volume_create_query, data=volume_data, project=project_id)
|
||||
assert_that(response.status_code, equal_to(201))
|
||||
|
||||
attach_data = {'date': '2016-01-01T18:40:00Z', 'attachments': [instance_id]}
|
||||
|
||||
volume_attach_query = "{url}/volume/{volume_id}/attach"
|
||||
response = self.almanachHelper.put(url=volume_attach_query, data=attach_data, volume_id=volume_id)
|
||||
assert_that(response.status_code, equal_to(200))
|
||||
|
||||
list_query = "{url}/project/{project}/volumes?start={start}&end={end}"
|
||||
response = self.almanachHelper.get(url=list_query,
|
||||
project=project_id,
|
||||
start="2016-01-01 18:39:00.000",
|
||||
end="2016-01-01 18:41:00.000")
|
||||
|
||||
assert_that(response.status_code, equal_to(200))
|
||||
assert_that(response.json(), has_item({'entity_id': volume_id,
|
||||
'attached_to': [instance_id],
|
||||
'end': None,
|
||||
'name': volume_data['volume_name'],
|
||||
'entity_type': 'volume',
|
||||
'last_event': '2016-01-01 18:40:00+00:00',
|
||||
'volume_type': messages.DEFAULT_VOLUME_TYPE,
|
||||
'start': '2016-01-01 18:40:00+00:00',
|
||||
'project_id': project_id,
|
||||
'size': volume_data['size']}))
|
||||
|
||||
def test_volume_attach_bad_date_format(self):
|
||||
volume_attach_query = "{url}/volume/my_test_volume_id/attach"
|
||||
attach_data = {'date': 'A_BAD_DATE',
|
||||
'attachments': ['AN_INSTANCE']}
|
||||
|
||||
response = self.almanachHelper.put(url=volume_attach_query, data=attach_data)
|
||||
assert_that(response.status_code, equal_to(400))
|
||||
assert_that(response.json(), has_entry(
|
||||
'error',
|
||||
'The provided date has an invalid format. Format should be of yyyy-mm-ddThh:mm:ss.msZ, '
|
||||
'ex: 2015-01-31T18:24:34.1523Z'
|
||||
))
|
||||
|
||||
def test_volume_attach_missing_param(self):
|
||||
volume_attach_query = "{url}/volume/my_test_volume_id/attach"
|
||||
attach_data = {'attachments': ['AN_INSTANCE']}
|
||||
|
||||
response = self.almanachHelper.put(url=volume_attach_query, data=attach_data)
|
||||
assert_that(response.status_code, equal_to(400))
|
||||
assert_that(response.json(), has_entry(
|
||||
"error",
|
||||
"The 'date' param is mandatory for the request you have made."
|
||||
))
|
||||
|
||||
def test_volume_detach(self):
|
||||
instance_create_query = "{url}/project/{project}/instance"
|
||||
project_id = "my_test_project_id"
|
||||
instance_id = str(uuid4())
|
||||
instance_data = {'id': instance_id,
|
||||
'created_at': '2016-01-01T18:30:00Z',
|
||||
'name': 'integration_test_instance_FlavorA',
|
||||
'flavor': 'FlavorA',
|
||||
'os_type': 'Linux',
|
||||
'os_distro': 'Ubuntu',
|
||||
'os_version': '14.04'}
|
||||
|
||||
response = self.almanachHelper.post(url=instance_create_query, data=instance_data, project=project_id)
|
||||
assert_that(response.status_code, equal_to(201))
|
||||
|
||||
volume_create_query = "{url}/project/{project}/volume"
|
||||
project_id = "my_test_project_id"
|
||||
volume_id = str(uuid4())
|
||||
volume_data = {'volume_id': volume_id,
|
||||
'attached_to': [instance_id],
|
||||
'volume_name': messages.DEFAULT_VOLUME_NAME,
|
||||
'volume_type': messages.DEFAULT_VOLUME_TYPE,
|
||||
'start': '2016-01-01T18:30:30Z',
|
||||
'size': 100}
|
||||
|
||||
response = self.almanachHelper.post(url=volume_create_query, data=volume_data, project=project_id)
|
||||
assert_that(response.status_code, equal_to(201))
|
||||
|
||||
detach_data = {'date': '2016-01-01T18:40:00Z',
|
||||
'attachments': []}
|
||||
|
||||
volume_detach_query = "{url}/volume/{volume_id}/detach"
|
||||
response = self.almanachHelper.put(url=volume_detach_query, data=detach_data, volume_id=volume_id)
|
||||
assert_that(response.status_code, equal_to(200))
|
||||
|
||||
list_query = "{url}/project/{project}/volumes?start={start}&end={end}"
|
||||
response = self.almanachHelper.get(url=list_query,
|
||||
project=project_id,
|
||||
start="2016-01-01 18:39:00.000",
|
||||
end="2016-01-01 18:41:00.000")
|
||||
|
||||
assert_that(response.status_code, equal_to(200))
|
||||
assert_that(response.json(), has_item({'entity_id': volume_id,
|
||||
'attached_to': detach_data['attachments'],
|
||||
'end': None,
|
||||
'name': volume_data['volume_name'],
|
||||
'entity_type': 'volume',
|
||||
'last_event': '2016-01-01 18:40:00+00:00',
|
||||
'volume_type': messages.DEFAULT_VOLUME_TYPE,
|
||||
'start': '2016-01-01 18:40:00+00:00',
|
||||
'project_id': project_id,
|
||||
'size': volume_data['size']}))
|
||||
|
||||
def test_volume_detach_bad_date_format(self):
|
||||
volume_detach_query = "{url}/volume/my_test_volume_id/detach"
|
||||
attach_data = {'date': 'A_BAD_DATE',
|
||||
'attachments': ['AN_INSTANCE']}
|
||||
|
||||
response = self.almanachHelper.put(url=volume_detach_query, data=attach_data)
|
||||
assert_that(response.status_code, equal_to(400))
|
||||
assert_that(response.json(), has_entry(
|
||||
'error',
|
||||
'The provided date has an invalid format. Format should be of yyyy-mm-ddThh:mm:ss.msZ, '
|
||||
'ex: 2015-01-31T18:24:34.1523Z'
|
||||
))
|
||||
|
||||
def test_volume_detach_missing_param(self):
    """Omitting the mandatory 'date' field on detach must return HTTP 400."""
    endpoint = "{url}/volume/my_test_volume_id/detach"
    payload = dict(attachments=['AN_INSTANCE'])

    result = self.almanachHelper.put(url=endpoint, data=payload)

    assert_that(result.status_code, equal_to(400))
    assert_that(result.json(), has_entry(
        "error",
        "The 'date' param is mandatory for the request you have made."
    ))
|
||||
|
||||
def test_volume_type_create(self):
    """Creating a volume type and fetching it back should round-trip its id and name."""
    type_id = str(uuid4())
    payload = {'type_id': type_id, 'type_name': messages.DEFAULT_VOLUME_NAME}

    create_response = self.almanachHelper.post(url="{url}/volume_type", data=payload)
    assert_that(create_response.status_code, equal_to(201))

    get_response = self.almanachHelper.get(url="{url}/volume_type/{volume_type_id}",
                                           volume_type_id=type_id)
    assert_that(get_response.status_code, equal_to(200))
    expected = {'volume_type_id': payload['type_id'],
                'volume_type_name': payload['type_name']}
    assert_that(get_response.json(), equal_to(expected))
|
||||
|
||||
@classmethod
@retry(exceptions=AssertionError, delay=10, max_delay=300)
def _wait_until_volume_type_is_created(cls):
    """Poll the API, retrying on assertion failure, until the default volume type is visible."""
    volume_type = cls._get_volume_types(messages.DEFAULT_VOLUME_TYPE)
    assert_that(volume_type, has_entry("volume_type_id", messages.DEFAULT_VOLUME_TYPE))
|
||||
|
||||
@classmethod
def _get_volume_types(cls, type_id):
    """Fetch one volume type by id from the API and return the decoded JSON body."""
    response = cls.almanachHelper.get(url="{url}/volume_type/{type_id}", type_id=type_id)
    return response.json()
|
55
integration_tests/test_collector_instance_create.py
Normal file
55
integration_tests/test_collector_instance_create.py
Normal file
@ -0,0 +1,55 @@
|
||||
# Copyright 2016 Internap.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import pytz
|
||||
import unittest
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from hamcrest import has_entry, is_not, assert_that
|
||||
from retry import retry
|
||||
|
||||
from builders.messages import get_instance_delete_end_sample, get_instance_create_end_sample
|
||||
from helpers.almanach_helper import AlmanachHelper
|
||||
from helpers.rabbit_mq_helper import RabbitMqHelper
|
||||
|
||||
|
||||
class CollectorTestInstance(unittest.TestCase):
    """Integration test: the collector must tolerate out-of-order instance events."""

    def setUp(self):
        self.almanachHelper = AlmanachHelper()
        self.rabbitMqHelper = RabbitMqHelper()

    def test_when_instance_delete_received_before_create_instance(self):
        """Publish delete.end before create.end; the entity must still end up closed."""
        project = str(uuid.uuid4())
        server = str(uuid.uuid4())

        delete_notification = get_instance_delete_end_sample(
            instance_id=server,
            tenant_id=project,
            deletion_timestamp=datetime(2016, 2, 1, 10, 0, 0, tzinfo=pytz.utc))
        create_notification = get_instance_create_end_sample(
            instance_id=server,
            tenant_id=project,
            creation_timestamp=datetime(2016, 2, 1, 9, 0, 0, tzinfo=pytz.utc))

        # Deliberately publish the deletion first to exercise out-of-order handling.
        self.rabbitMqHelper.push(delete_notification)
        self.rabbitMqHelper.push(create_notification)

        self.assert_instance_delete_received_before_instance_create(project)

    @retry(exceptions=AssertionError, delay=10, max_delay=300)
    def assert_instance_delete_received_before_instance_create(self, tenant_id):
        """Retry until the collector has closed the entity (its 'end' is not None)."""
        entities = self.almanachHelper.get_entities(tenant_id, "2016-01-01 00:00:00.000")
        assert_that(entities, is_not(has_entry("end", None)))
|
110
integration_tests/test_collector_multi_attach.py
Normal file
110
integration_tests/test_collector_multi_attach.py
Normal file
@ -0,0 +1,110 @@
|
||||
# Copyright 2016 Internap.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import time
|
||||
import uuid
|
||||
import pytz
|
||||
|
||||
from datetime import datetime
|
||||
from hamcrest import assert_that, equal_to
|
||||
from builders import messages
|
||||
from helpers.rabbit_mq_helper import RabbitMqHelper
|
||||
from helpers.almanach_helper import AlmanachHelper
|
||||
from helpers.mongo_helper import MongoHelper
|
||||
from base_api_testcase import BaseApiTestCase
|
||||
|
||||
|
||||
class CollectorMultiAttachTest(BaseApiTestCase):
    """Integration tests for kilo-style volume multi-attach and detach events."""

    tenant_id = None
    rabbitMqHelper = None

    @classmethod
    def setUpClass(cls):
        cls.almanachHelper = AlmanachHelper()
        cls.rabbitMqHelper = RabbitMqHelper()
        cls.prepare_dataset()

    @classmethod
    def prepare_dataset(cls):
        """Reset the database and publish the volume events every test in this class reads."""
        MongoHelper().drop_database()
        cls.tenant_id = "my-tenant-" + str(uuid.uuid4())
        cls.setup_volume_type()
        cls.setup_attached_kilo_volume(cls.tenant_id)
        cls.setup_detached_kilo_volume(cls.tenant_id)
        time.sleep(5)  # todo replace me with @retry

    @classmethod
    def tearDownClass(cls):
        MongoHelper().drop_database()

    def test_kilo_volume_attach(self):
        """A volume attached twice must appear once, with two attachments."""
        matches = [entity for entity in self.query_almanach()
                   if entity.get('entity_id') == 'attached-volume-kilo']

        assert_that(len(matches), equal_to(1))
        assert_that(len(matches[0].get('attached_to')), equal_to(2))

    def test_kilo_volume_detach(self):
        """Attach-then-detach must produce two detached periods and one attached period."""
        entities = self.query_almanach()

        detached = [entity for entity in entities
                    if entity.get('entity_id') == 'detached-volume-kilo'
                    and entity.get('attached_to') == []]
        assert_that(len(detached), equal_to(2))

        attached = [entity for entity in entities
                    if entity.get('entity_id') == 'detached-volume-kilo'
                    and entity.get('attached_to') == ["my_vm"]]
        assert_that(len(attached), equal_to(1))

    def query_almanach(self):
        """Return all entities recorded for this test's tenant since 2010."""
        response = self.almanachHelper.get(url="{url}/project/{project}/entities?start={start}",
                                           project=self.tenant_id,
                                           start="2010-01-01 18:50:00.000")
        return json.loads(response.text)

    @classmethod
    def setup_attached_kilo_volume(cls, tenant_id):
        """Create a volume and attach it to two instances, one at a time."""
        cls.push(message=messages.get_volume_create_end_sample(
            volume_id="attached-volume-kilo", tenant_id=tenant_id,
            volume_type=messages.DEFAULT_VOLUME_TYPE))

        cls.push(message=messages.get_volume_attach_kilo_end_sample(
            volume_id="attached-volume-kilo", tenant_id=tenant_id, attached_to=["vm1"]))

        cls.push(message=messages.get_volume_attach_kilo_end_sample(
            volume_id="attached-volume-kilo", tenant_id=tenant_id, attached_to=["vm1", "vm2"]))

    @classmethod
    def setup_detached_kilo_volume(cls, tenant_id):
        """Create a volume, attach it to one instance, then detach it a day later."""
        cls.push(message=messages.get_volume_create_end_sample(
            volume_id="detached-volume-kilo", tenant_id=tenant_id,
            volume_type=messages.DEFAULT_VOLUME_TYPE))

        cls.push(message=messages.get_volume_attach_kilo_end_sample(
            volume_id="detached-volume-kilo", tenant_id=tenant_id, attached_to=["my_vm"],
            timestamp=datetime(2015, 7, 29, 8, 1, 59, tzinfo=pytz.utc)))

        cls.push(message=messages.get_volume_detach_kilo_end_sample(
            volume_id="detached-volume-kilo", tenant_id=tenant_id, attached_to=[],
            timestamp=datetime(2015, 7, 30, 8, 1, 59, tzinfo=pytz.utc)))

    @classmethod
    def setup_volume_type(cls):
        """Register the default volume type the volume events reference."""
        cls.push(message=messages.get_volume_type_create_sample(
            volume_type_id=messages.DEFAULT_VOLUME_TYPE,
            volume_type_name=messages.DEFAULT_VOLUME_TYPE))

    @classmethod
    def push(cls, message):
        """Publish one notification message on the test RabbitMQ bus."""
        cls.rabbitMqHelper.push(message)
|
@ -12,15 +12,14 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import pytz
|
||||
import unittest
|
||||
from datetime import datetime
|
||||
|
||||
import pytz
|
||||
from flexmock import flexmock, flexmock_teardown
|
||||
|
||||
from almanach.common.exceptions.almanach_entity_not_found_exception import AlmanachEntityNotFoundException
|
||||
from tests import messages
|
||||
from almanach.adapters.bus_adapter import BusAdapter
|
||||
from almanach.common.exceptions.almanach_entity_not_found_exception import AlmanachEntityNotFoundException
|
||||
from integration_tests.builders import messages
|
||||
|
||||
|
||||
class BusAdapterTest(unittest.TestCase):
|
||||
|
8
tox-integration.ini
Normal file
8
tox-integration.ini
Normal file
@ -0,0 +1,8 @@
|
||||
[tox]
|
||||
envlist = integration-tests
|
||||
skipsdist = True
|
||||
|
||||
[testenv:integration-tests]
|
||||
passenv = *
|
||||
deps = -r{toxinidir}/integration-test-requirements.txt
|
||||
commands = nosetests -s --tests integration_tests
|
6
tox.ini
6
tox.ini
@ -3,10 +3,12 @@ envlist = py27,flake8
|
||||
|
||||
[testenv]
|
||||
deps = -r{toxinidir}/test-requirements.txt
|
||||
commands =
|
||||
nosetests --tests tests
|
||||
setenv =
|
||||
PYTHONPATH = {toxinidir}
|
||||
commands = nosetests --tests tests
|
||||
|
||||
[testenv:flake8]
|
||||
deps = -r{toxinidir}/test-requirements.txt
|
||||
commands = flake8
|
||||
|
||||
[flake8]
|
||||
|
Loading…
Reference in New Issue
Block a user