API microversion 2.69: Handles Down Cells
This patch introduces a new API microversion which is used to alter the
responses of ``GET /servers``, ``GET /servers/detail``,
``GET /servers/{server_id}`` and ``GET /os-services`` when a cell is down, so
that minimal constructs with missing keys are returned instead of an error.

Implements blueprint handling-down-cell

Change-Id: Id9f12532897912b39093f63e9286540d9029edeb
parent 045d883772
commit 983abb05d7
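The sketch below is editorial and not part of the patch. Assuming a Keystone-authenticated compute endpoint (the endpoint URL and token are placeholders), it shows how a client might opt in to microversion 2.69 and recognise the minimal constructs described in the hunks that follow; the ``OpenStack-API-Version`` header is the standard way to request a compute microversion.

import requests

# Placeholder endpoint and token; substitute values from your own cloud.
NOVA_ENDPOINT = "http://openstack.example.com/v2.1"
TOKEN = "gAAAAAB-example-token"

headers = {
    "X-Auth-Token": TOKEN,
    # Opt in to microversion 2.69 so that records living in unreachable
    # cells come back as minimal constructs instead of failing the request.
    "OpenStack-API-Version": "compute 2.69",
}

resp = requests.get(NOVA_ENDPOINT + "/servers/detail", headers=headers)
resp.raise_for_status()

for server in resp.json()["servers"]:
    if server.get("status") == "UNKNOWN":
        # Down-cell record: only a handful of keys are guaranteed.
        print("partial record for server %s" % server["id"])
    else:
        print("full record for server %s (%s)" % (server["id"], server["name"]))
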
@@ -55,6 +55,15 @@ Response

.. literalinclude:: ../../doc/api_samples/os-services/v2.11/services-list-get-resp.json
   :language: javascript

**Example List Compute Services (2.69)**

This is a sample response for the services from the non-responsive part of the
deployment. The responses for the available service records will be normal
without any missing keys.

.. literalinclude:: ../../doc/api_samples/os-services/v2.69/services-list-get-resp.json
   :language: javascript

Disable Scheduling For A Compute Service
========================================

@@ -249,6 +249,15 @@ Response

.. literalinclude:: ../../doc/api_samples/servers/servers-list-resp.json
   :language: javascript

**Example List Servers (2.69)**

This is a sample response for the servers from the non-responsive part of the
deployment. The responses for the available server records will be normal
without any missing keys.

.. literalinclude:: ../../doc/api_samples/servers/v2.69/servers-list-resp.json
   :language: javascript

Create Server
=============

@@ -627,6 +636,15 @@ Response

.. literalinclude:: /../../doc/api_samples/servers/v2.63/servers-details-resp.json
   :language: javascript

**Example List Servers Detailed (2.69)**

This is a sample response for the servers from the non-responsive part of the
deployment. The responses for the available server records will be normal
without any missing keys.

.. literalinclude:: ../../doc/api_samples/servers/v2.69/servers-details-resp.json
   :language: javascript

Show Server Details
===================

@@ -732,6 +750,15 @@ Response

.. literalinclude:: ../../doc/api_samples/servers/v2.63/server-get-resp.json
   :language: javascript

**Example Show Server Details (2.69)**

This is a sample response for a server from the non-responsive part of the
deployment. The responses for available server records will be normal
without any missing keys.

.. literalinclude:: ../../doc/api_samples/servers/v2.69/server-get-resp.json
   :language: javascript

Update Server
=============

@@ -0,0 +1,14 @@
{
    "services": [
        {
            "binary": "nova-compute",
            "host": "host1",
            "status": "UNKNOWN"
        },
        {
            "binary": "nova-compute",
            "host": "host2",
            "status": "UNKNOWN"
        }
    ]
}

doc/api_samples/servers/v2.69/server-create-req.json (new file, 20 lines)
@@ -0,0 +1,20 @@
{
    "server" : {
        "accessIPv4": "1.2.3.4",
        "accessIPv6": "80fe::",
        "name" : "new-server-test",
        "imageRef" : "70a599e0-31e7-49b7-b260-868f441e862b",
        "flavorRef" : "http://openstack.example.com/flavors/1",
        "OS-DCF:diskConfig": "AUTO",
        "metadata" : {
            "My Server Name" : "Apache1"
        },
        "security_groups": [
            {
                "name": "default"
            }
        ],
        "user_data" : "IyEvYmluL2Jhc2gKL2Jpbi9zdQplY2hvICJJIGFtIGluIHlvdSEiCg==",
        "networks": "auto"
    }
}

doc/api_samples/servers/v2.69/server-create-resp.json (new file, 22 lines)
@@ -0,0 +1,22 @@
{
    "server": {
        "OS-DCF:diskConfig": "AUTO",
        "adminPass": "mqtDAwb2y7Zh",
        "id": "6f81aefe-472a-49d8-ba8d-758a5082c7e5",
        "links": [
            {
                "href": "http://openstack.example.com/v2.1/6f70656e737461636b20342065766572/servers/6f81aefe-472a-49d8-ba8d-758a5082c7e5",
                "rel": "self"
            },
            {
                "href": "http://openstack.example.com/6f70656e737461636b20342065766572/servers/6f81aefe-472a-49d8-ba8d-758a5082c7e5",
                "rel": "bookmark"
            }
        ],
        "security_groups": [
            {
                "name": "default"
            }
        ]
    }
}

doc/api_samples/servers/v2.69/server-get-resp.json (new file, 29 lines)
@@ -0,0 +1,29 @@
{
    "server": {
        "OS-EXT-AZ:availability_zone": "UNKNOWN",
        "OS-EXT-STS:power_state": 0,
        "created": "2018-12-03T21:06:18Z",
        "flavor": {
            "disk": 1,
            "ephemeral": 0,
            "extra_specs": {},
            "original_name": "m1.tiny",
            "ram": 512,
            "swap": 0,
            "vcpus": 1
        },
        "id": "33748c23-38dd-4f70-b774-522fc69e7b67",
        "image": {
            "id": "70a599e0-31e7-49b7-b260-868f441e862b",
            "links": [
                {
                    "href": "http://openstack.example.com/6f70656e737461636b20342065766572/images/70a599e0-31e7-49b7-b260-868f441e862b",
                    "rel": "bookmark"
                }
            ]
        },
        "status": "UNKNOWN",
        "tenant_id": "project",
        "user_id": "fake"
    }
}

doc/api_samples/servers/v2.69/servers-details-resp.json (new file, 20 lines)
@@ -0,0 +1,20 @@
{
    "servers": [
        {
            "created": "2018-12-03T21:06:18Z",
            "id": "b6b0410f-b65f-4473-855e-5d82a71759e0",
            "status": "UNKNOWN",
            "tenant_id": "6f70656e737461636b20342065766572",
            "links": [
                {
                    "href": "http://openstack.example.com/v2.1/6f70656e737461636b20342065766572/servers/b6b0410f-b65f-4473-855e-5d82a71759e0",
                    "rel": "self"
                },
                {
                    "href": "http://openstack.example.com/6f70656e737461636b20342065766572/servers/b6b0410f-b65f-4473-855e-5d82a71759e0",
                    "rel": "bookmark"
                }
            ]
        }
    ]
}

doc/api_samples/servers/v2.69/servers-list-resp.json (new file, 18 lines)
@@ -0,0 +1,18 @@
{
    "servers": [
        {
            "id": "2e136db7-b4a4-4815-8a00-25d9bfe59617",
            "status": "UNKNOWN",
            "links": [
                {
                    "href": "http://openstack.example.com/v2.1/6f70656e737461636b20342065766572/servers/2e136db7-b4a4-4815-8a00-25d9bfe59617",
                    "rel": "self"
                },
                {
                    "href": "http://openstack.example.com/6f70656e737461636b20342065766572/servers/2e136db7-b4a4-4815-8a00-25d9bfe59617",
                    "rel": "bookmark"
                }
            ]
        }
    ]
}

@@ -19,7 +19,7 @@
            }
        ],
        "status": "CURRENT",
-       "version": "2.68",
+       "version": "2.69",
        "min_version": "2.1",
        "updated": "2013-07-23T11:33:21Z"
    }

@@ -22,7 +22,7 @@
            }
        ],
        "status": "CURRENT",
-       "version": "2.68",
+       "version": "2.69",
        "min_version": "2.1",
        "updated": "2013-07-23T11:33:21Z"
    }

@@ -166,6 +166,10 @@ REST_API_VERSION_HISTORY = """REST API Version History:
             ``block_device_mapping_v2`` parameter when creating a server.
    * 2.68 - Remove support for forced live migration and evacuate server
             actions.
    * 2.69 - Add support for returning minimal constructs for ``GET /servers``,
             ``GET /servers/detail``, ``GET /servers/{server_id}`` and
             ``GET /os-services`` when there is a transient unavailability
             condition in the deployment like an infrastructure failure.
"""

# The minimum and maximum versions of the API supported

@@ -174,7 +178,7 @@ REST_API_VERSION_HISTORY = """REST API Version History:
# Note(cyeoh): This only applies for the v2.1 API once microversions
# support is fully merged. It does not affect the V2 API.
_MIN_API_VERSION = "2.1"
-_MAX_API_VERSION = "2.68"
+_MAX_API_VERSION = "2.69"
DEFAULT_API_VERSION = _MIN_API_VERSION

# Almost all proxy APIs which are related to network, images and baremetal

@@ -49,6 +49,8 @@ from nova import utils

TAG_SEARCH_FILTERS = ('tags', 'tags-any', 'not-tags', 'not-tags-any')
DEVICE_TAGGING_MIN_COMPUTE_VERSION = 14
PARTIAL_CONSTRUCT_FOR_CELL_DOWN_MIN_VERSION = '2.69'
PAGING_SORTING_PARAMS = ('sort_key', 'sort_dir', 'limit', 'marker')

CONF = nova.conf.CONF

@@ -107,19 +109,44 @@ class ServersController(wsgi.Controller):
            raise exc.HTTPBadRequest(explanation=err.format_message())
        return servers

    @staticmethod
    def _is_cell_down_supported(req, search_opts):
        cell_down_support = api_version_request.is_supported(
            req, min_version=PARTIAL_CONSTRUCT_FOR_CELL_DOWN_MIN_VERSION)

        if cell_down_support:
            # NOTE(tssurya): Minimal constructs would be returned from the down
            # cells if cell_down_support is True, however if filtering, sorting
            # or paging is requested by the user, then cell_down_support should
            # be made False and the down cells should be skipped (depending on
            # CONF.api.list_records_by_skipping_down_cells) as there is no
            # way to return correct results for the down cells in those
            # situations due to missing keys/information.
            # NOTE(tssurya): Since there is a chance that
            # remove_invalid_options function could have removed the paging and
            # sorting parameters, we add the additional check for that from the
            # request.
            pag_sort = any(
                ps in req.GET.keys() for ps in PAGING_SORTING_PARAMS)
            # NOTE(tssurya): ``nova list --all_tenants`` is the only
            # allowed filter exception when handling down cells.
            filters = list(search_opts.keys()) not in ([u'all_tenants'], [])
            if pag_sort or filters:
                cell_down_support = False
        return cell_down_support

    def _get_servers(self, req, is_detail):
        """Returns a list of servers, based on any search options specified."""

        search_opts = {}
        search_opts.update(req.GET)

-        # NOTE(tssurya): Will be enabled after bumping the microversion.
-        cell_down_support = False

        context = req.environ['nova.context']
        remove_invalid_options(context, search_opts,
                               self._get_server_search_options(req))

+        cell_down_support = self._is_cell_down_supported(req, search_opts)

        for search_opt in search_opts:
            if (search_opt in
                    schema_servers.JOINED_TABLE_QUERY_PARAMS_SERVERS.keys() or

@@ -396,12 +423,13 @@ class ServersController(wsgi.Controller):
        """Returns server details by server id."""
        context = req.environ['nova.context']
        context.can(server_policies.SERVERS % 'show')
-        # TODO(tssurya): enable cell_down_support after bumping the
-        # microversion.
+        cell_down_support = api_version_request.is_supported(
+            req, min_version=PARTIAL_CONSTRUCT_FOR_CELL_DOWN_MIN_VERSION)
        instance = self._get_server(
-            context, req, id, is_detail=True, cell_down_support=False)
+            context, req, id, is_detail=True,
+            cell_down_support=cell_down_support)
        return self._view_builder.show(
-            req, instance, cell_down_support=False)
+            req, instance, cell_down_support=cell_down_support)

    @wsgi.response(202)
    @wsgi.expected_errors((400, 403, 409))

@@ -1212,7 +1240,7 @@ def remove_invalid_options(context, search_options, allowed_search_options):
    if context.can(server_policies.SERVERS % 'allow_all_filters',
                   fatal=False):
        # Only remove parameters for sorting and pagination
-        for key in ('sort_key', 'sort_dir', 'limit', 'marker'):
+        for key in PAGING_SORTING_PARAMS:
            search_options.pop(key, None)
        return
    # Otherwise, strip out all unknown options

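An editorial aside, not part of the patch: the check in ``_is_cell_down_supported`` above boils down to a small rule, sketched here as standalone Python with invented names for illustration.

PAGING_SORTING_PARAMS = ('sort_key', 'sort_dir', 'limit', 'marker')


def wants_minimal_constructs(requested_version, query_params, search_opts):
    """Return True when down-cell minimal constructs may be returned."""
    if requested_version < (2, 69):
        return False
    # Paging or sorting cannot be answered correctly for records with
    # missing keys, so their presence turns the feature off again.
    if any(p in query_params for p in PAGING_SORTING_PARAMS):
        return False
    # all_tenants is the only filter that is still tolerated.
    return not (set(search_opts) - {'all_tenants'})


# A plain listing at 2.69 qualifies; a sorted one does not.
print(wants_minimal_constructs((2, 69), {}, {}))                          # True
print(wants_minimal_constructs((2, 69), {'sort_key': 'created_at'}, {}))  # False
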
@@ -31,6 +31,7 @@ from nova import servicegroup
from nova import utils

UUID_FOR_ID_MIN_VERSION = '2.53'
PARTIAL_CONSTRUCT_FOR_CELL_DOWN_MIN_VERSION = '2.69'


class ServiceController(wsgi.Controller):

@@ -59,10 +60,13 @@ class ServiceController(wsgi.Controller):
        context = req.environ['nova.context']
        context.can(services_policies.BASE_POLICY_NAME)

        cell_down_support = api_version_request.is_supported(
            req, min_version=PARTIAL_CONSTRUCT_FOR_CELL_DOWN_MIN_VERSION)

        _services = [
            s
            for s in self.host_api.service_get_all(context, set_zones=True,
-                all_cells=True, cell_down_support=False)
+                all_cells=True, cell_down_support=cell_down_support)
            if s['binary'] not in api_services
        ]

@@ -79,7 +83,16 @@ class ServiceController(wsgi.Controller):

        return _services

-    def _get_service_detail(self, svc, additional_fields, req):
+    def _get_service_detail(self, svc, additional_fields, req,
+                            cell_down_support=False):
+        # NOTE(tssurya): The below logic returns a minimal service construct
+        # consisting of only the host, binary and status fields for the compute
+        # services in the down cell.
+        if (cell_down_support and 'uuid' not in svc):
+            return {'binary': svc.binary,
+                    'host': svc.host,
+                    'status': "UNKNOWN"}

        alive = self.servicegroup_api.service_is_up(svc)
        state = (alive and "up") or "down"
        active = 'enabled'

@@ -116,8 +129,10 @@ class ServiceController(wsgi.Controller):

    def _get_services_list(self, req, additional_fields=()):
        _services = self._get_services(req)
-        return [self._get_service_detail(svc, additional_fields, req)
-                for svc in _services]
+        cell_down_support = api_version_request.is_supported(req,
+            min_version=PARTIAL_CONSTRUCT_FOR_CELL_DOWN_MIN_VERSION)
+        return [self._get_service_detail(svc, additional_fields, req,
+            cell_down_support=cell_down_support) for svc in _services]

    def _enable(self, body, context):
        """Enable scheduling for a service."""

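Another editorial sketch, not from the patch: seen from the client side, a down-cell service record returned by ``GET /os-services`` at 2.69 carries only ``binary``, ``host`` and ``status`` (set to ``UNKNOWN``), so callers can split the payload like this (the helper name is invented).

def split_service_records(body):
    """Separate full service records from down-cell minimal ones."""
    known, unknown = [], []
    for svc in body["services"]:
        # Minimal constructs report status UNKNOWN and omit fields such as
        # state, zone, updated_at and the service id/uuid.
        (unknown if svc.get("status") == "UNKNOWN" else known).append(svc)
    return known, unknown


# With the sample response shown earlier, host1 and host2 both land in `unknown`.
sample = {"services": [
    {"binary": "nova-compute", "host": "host1", "status": "UNKNOWN"},
    {"binary": "nova-compute", "host": "host2", "status": "UNKNOWN"},
]}
print(split_service_records(sample))
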
@@ -336,6 +336,11 @@ When set to False, this will cause the API to return a 500 error if there is an
infrastructure failure like non-responsive cells. If you want the API to skip
the down cells and return the results from the up cells set this option to
True.

Note that from API microversion 2.69 there could be transient conditions in the
deployment where certain records are not available and the results could be
partial for certain requests containing those records. In those cases this
option will be ignored.
"""),
]

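For illustration only (not part of the patch): the option described in this help text lives in the ``[api]`` group of ``nova.conf``; the value shown here is just an example.

[api]
# Skip non-responsive cells and return results from the reachable ones
# instead of failing the whole request with a 500. From microversion 2.69
# the API can return partial results for the affected records instead, and
# this option is then ignored for those requests.
list_records_by_skipping_down_cells = true
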
@@ -0,0 +1,14 @@
{
    "services": [
        {
            "binary": "nova-compute",
            "host": "host1",
            "status": "UNKNOWN"
        },
        {
            "binary": "nova-compute",
            "host": "host2",
            "status": "UNKNOWN"
        }
    ]
}

@@ -0,0 +1,20 @@
{
    "server" : {
        "accessIPv4": "%(access_ip_v4)s",
        "accessIPv6": "%(access_ip_v6)s",
        "name" : "new-server-test",
        "imageRef" : "%(image_id)s",
        "flavorRef" : "1",
        "OS-DCF:diskConfig": "AUTO",
        "metadata" : {
            "My Server Name" : "Apache1"
        },
        "security_groups": [
            {
                "name": "default"
            }
        ],
        "user_data" : "%(user_data)s",
        "networks": "auto"
    }
}

@@ -0,0 +1,22 @@
{
    "server": {
        "OS-DCF:diskConfig": "AUTO",
        "adminPass": "%(password)s",
        "id": "%(id)s",
        "links": [
            {
                "href": "%(versioned_compute_endpoint)s/servers/%(uuid)s",
                "rel": "self"
            },
            {
                "href": "%(compute_endpoint)s/servers/%(uuid)s",
                "rel": "bookmark"
            }
        ],
        "security_groups": [
            {
                "name": "default"
            }
        ]
    }
}

@@ -0,0 +1,29 @@
{
    "server": {
        "OS-EXT-AZ:availability_zone": "UNKNOWN",
        "OS-EXT-STS:power_state": 0,
        "created": "%(isotime)s",
        "flavor": {
            "disk": 1,
            "ephemeral": 0,
            "extra_specs": {},
            "original_name": "m1.tiny",
            "ram": 512,
            "swap": 0,
            "vcpus": 1
        },
        "id": "%(id)s",
        "image": {
            "id": "70a599e0-31e7-49b7-b260-868f441e862b",
            "links": [
                {
                    "href": "http://openstack.example.com/6f70656e737461636b20342065766572/images/70a599e0-31e7-49b7-b260-868f441e862b",
                    "rel": "bookmark"
                }
            ]
        },
        "status": "UNKNOWN",
        "tenant_id": "project",
        "user_id": "fake"
    }
}

@@ -0,0 +1,20 @@
{
    "servers": [
        {
            "created": "%(isotime)s",
            "id": "%(uuid)s",
            "status": "UNKNOWN",
            "tenant_id": "6f70656e737461636b20342065766572",
            "links": [
                {
                    "href": "%(versioned_compute_endpoint)s/servers/%(uuid)s",
                    "rel": "self"
                },
                {
                    "href": "%(compute_endpoint)s/servers/%(uuid)s",
                    "rel": "bookmark"
                }
            ]
        }
    ]
}

@@ -0,0 +1,18 @@
{
    "servers": [
        {
            "id": "%(uuid)s",
            "status": "UNKNOWN",
            "links": [
                {
                    "href": "%(versioned_compute_endpoint)s/servers/%(uuid)s",
                    "rel": "self"
                },
                {
                    "href": "%(compute_endpoint)s/servers/%(uuid)s",
                    "rel": "bookmark"
                }
            ]
        }
    ]
}

@@ -16,6 +16,7 @@
import base64
import time

from oslo_utils import fixture as utils_fixture
from oslo_utils import timeutils
import six

@@ -366,6 +367,53 @@ class ServersSampleJson267Test(ServersSampleBase):
        return self._post_server(use_common_server_api_samples=False)


class ServersSampleJson269Test(ServersSampleBase):
    microversion = '2.69'
    scenarios = [('v2_69', {'api_major_version': 'v2.1'})]

    def setUp(self):
        super(ServersSampleJson269Test, self).setUp()

        def _fake_instancemapping_get_by_cell_and_project(*args, **kwargs):
            # global cell based on which rest of the functions are stubbed out
            cell_fixture = nova_fixtures.SingleCellSimple()
            return [{
                'id': 1,
                'updated_at': None,
                'created_at': None,
                'instance_uuid': utils_fixture.uuidsentinel.inst,
                'cell_id': 1,
                'project_id': "6f70656e737461636b20342065766572",
                'cell_mapping': cell_fixture._fake_cell_list()[0],
                'queued_for_delete': False
            }]

        self.stub_out('nova.objects.InstanceMappingList.'
                      '_get_not_deleted_by_cell_and_project_from_db',
                      _fake_instancemapping_get_by_cell_and_project)

    def test_servers_list_from_down_cells(self):
        uuid = self._post_server(use_common_server_api_samples=False)
        with nova_fixtures.DownCellFixture():
            response = self._do_get('servers')
        subs = {'id': uuid}
        self._verify_response('servers-list-resp', subs, response, 200)

    def test_servers_details_from_down_cells(self):
        uuid = self._post_server(use_common_server_api_samples=False)
        with nova_fixtures.DownCellFixture():
            response = self._do_get('servers/detail')
        subs = {'id': uuid}
        self._verify_response('servers-details-resp', subs, response, 200)

    def test_server_get_from_down_cells(self):
        uuid = self._post_server(use_common_server_api_samples=False)
        with nova_fixtures.DownCellFixture():
            response = self._do_get('servers/%s' % uuid)
        subs = {'id': uuid}
        self._verify_response('server-get-resp', subs, response, 200)


class ServersUpdateSampleJsonTest(ServersSampleBase):

    def test_update_server(self):

@@ -16,6 +16,7 @@
from oslo_utils import fixture as utils_fixture

from nova import exception
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional.api_sample_tests import api_sample_base
from nova.tests.unit.api.openstack.compute import test_services
from nova.tests.unit.objects import test_compute_node

@@ -184,3 +185,46 @@ class ServicesV253JsonTest(ServicesV211JsonTest):
                                'service-force-down-put-req', subs)
        self._verify_response('service-force-down-put-resp', subs,
                              response, 200)


class ServicesV269JsonTest(api_sample_base.ApiSampleTestBaseV21):
    ADMIN_API = True
    sample_dir = "os-services"
    microversion = '2.69'
    scenarios = [('v2_69', {'api_major_version': 'v2.1'})]

    def setUp(self):
        super(ServicesV269JsonTest, self).setUp()

        def _fake_cell_list(*args, **kwargs):
            return [{'id': 1,
                     'updated_at': None,
                     'created_at': None,
                     'uuid': utils_fixture.uuidsentinel.cell1,
                     'name': 'onlycell',
                     'transport_url': 'fake://nowhere/',
                     'database_connection': 'sqlite:///',
                     'disabled': False}]

        def fake_hostmappinglist_get(*args, **kwargs):
            cm = _fake_cell_list()[0]
            return [{'id': 1,
                     'updated_at': None,
                     'created_at': None,
                     'host': 'host1',
                     'cell_mapping': cm},
                    {'id': 2,
                     'updated_at': None,
                     'created_at': None,
                     'host': 'host2',
                     'cell_mapping': cm}]

        self.stub_out('nova.objects.HostMappingList._get_from_db',
                      fake_hostmappinglist_get)

    def test_get_services_from_down_cells(self):
        subs = {}
        with nova_fixtures.DownCellFixture():
            response = self._do_get('os-services')
        self._verify_response('services-list-get-resp', subs,
                              response, 200)

@@ -49,6 +49,7 @@ from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_notifier
from nova.tests.unit import fake_requests
import nova.tests.unit.image.fake
from nova.tests.unit.objects import test_instance_info_cache
from nova.virt import fake
from nova import volume

@@ -1239,6 +1240,228 @@ class ServerTestV220(ServersTestBase):
        self._delete_server(server_id)


class ServerTestV269(ServersTestBase):
    api_major_version = 'v2.1'
    NUMBER_OF_CELLS = 3

    def setUp(self):
        super(ServerTestV269, self).setUp()
        self.api.microversion = '2.69'

        self.ctxt = context.get_admin_context()
        self.project_id = self.api.project_id
        self.cells = objects.CellMappingList.get_all(self.ctxt)
        self.down_cell_insts = []
        self.up_cell_insts = []
        self.down_cell_mappings = objects.CellMappingList()
        flavor = objects.Flavor(id=1, name='flavor1',
                                memory_mb=256, vcpus=1,
                                root_gb=1, ephemeral_gb=1,
                                flavorid='1',
                                swap=0, rxtx_factor=1.0,
                                vcpu_weight=1,
                                disabled=False,
                                is_public=True,
                                extra_specs={},
                                projects=[])
        _info_cache = objects.InstanceInfoCache(context)
        objects.InstanceInfoCache._from_db_object(context, _info_cache,
            test_instance_info_cache.fake_info_cache)
        # cell1 and cell2 will be the down cells while
        # cell0 and cell3 will be the up cells.
        down_cell_names = ['cell1', 'cell2']
        for cell in self.cells:
            # create 2 instances and their mappings in all the 4 cells
            for i in range(2):
                with context.target_cell(self.ctxt, cell) as cctxt:
                    inst = objects.Instance(
                        context=cctxt,
                        project_id=self.project_id,
                        user_id=self.project_id,
                        instance_type_id=flavor.id,
                        hostname='%s-inst%i' % (cell.name, i),
                        flavor=flavor,
                        info_cache=_info_cache,
                        display_name='server-test')
                    inst.create()
                im = objects.InstanceMapping(context=self.ctxt,
                                             instance_uuid=inst.uuid,
                                             cell_mapping=cell,
                                             project_id=self.project_id,
                                             queued_for_delete=False)
                im.create()
                if cell.name in down_cell_names:
                    self.down_cell_insts.append(inst.uuid)
                else:
                    self.up_cell_insts.append(inst.uuid)
            # In cell1 and cell3 add a third instance in a different project
            # to show the --all-tenants case.
            if cell.name == 'cell1' or cell.name == 'cell3':
                with context.target_cell(self.ctxt, cell) as cctxt:
                    inst = objects.Instance(
                        context=cctxt,
                        project_id='faker',
                        user_id='faker',
                        instance_type_id=flavor.id,
                        hostname='%s-inst%i' % (cell.name, 3),
                        flavor=flavor,
                        info_cache=_info_cache,
                        display_name='server-test')
                    inst.create()
                im = objects.InstanceMapping(context=self.ctxt,
                                             instance_uuid=inst.uuid,
                                             cell_mapping=cell,
                                             project_id='faker',
                                             queued_for_delete=False)
                im.create()
            if cell.name in down_cell_names:
                self.down_cell_mappings.objects.append(cell)
        self.useFixture(nova_fixtures.DownCellFixture(self.down_cell_mappings))

    def test_get_servers_with_down_cells(self):
        servers = self.api.get_servers(detail=False)
        # 4 servers from the up cells and 4 servers from the down cells
        self.assertEqual(8, len(servers))
        for server in servers:
            if 'name' not in server:
                # server is in the down cell.
                self.assertEqual('UNKNOWN', server['status'])
                self.assertIn(server['id'], self.down_cell_insts)
                self.assertIn('links', server)
                # the partial construct will have only the above 3 keys
                self.assertEqual(3, len(server))
            else:
                # server in up cell
                self.assertIn(server['id'], self.up_cell_insts)
                # has all the keys
                self.assertEqual(server['name'], 'server-test')
                self.assertIn('links', server)

    def test_get_servers_detail_with_down_cells(self):
        servers = self.api.get_servers()
        # 4 servers from the up cells and 4 servers from the down cells
        self.assertEqual(8, len(servers))
        for server in servers:
            if 'user_id' not in server:
                # server is in the down cell.
                self.assertEqual('UNKNOWN', server['status'])
                self.assertIn(server['id'], self.down_cell_insts)
                # the partial construct will have only 5 keys:
                # created, tenant_id, status, id and links.
                self.assertEqual(5, len(server))
            else:
                # server in up cell
                self.assertIn(server['id'], self.up_cell_insts)
                # has all the keys
                self.assertEqual(server['user_id'], self.project_id)
                self.assertIn('image', server)

    def test_get_servers_detail_limits_with_down_cells(self):
        servers = self.api.get_servers(search_opts={'limit': 5})
        # 4 servers from the up cells since we skip down cell
        # results by default for paging.
        self.assertEqual(4, len(servers), servers)
        for server in servers:
            # server in up cell
            self.assertIn(server['id'], self.up_cell_insts)
            # has all the keys
            self.assertEqual(server['user_id'], self.project_id)
            self.assertIn('image', server)

    def test_get_servers_detail_limits_with_down_cells_the_500_gift(self):
        self.flags(list_records_by_skipping_down_cells=False, group='api')
        # We get an API error with a 500 response code since the
        # list_records_by_skipping_down_cells config option is False.
        exp = self.assertRaises(client.OpenStackApiException,
                                self.api.get_servers,
                                search_opts={'limit': 5})
        self.assertEqual(500, exp.response.status_code)
        self.assertIn('NovaException', six.text_type(exp))

    def test_get_servers_detail_marker_in_down_cells(self):
        marker = self.down_cell_insts[2]
        # It will fail with a 500 if the marker is in the down cell.
        exp = self.assertRaises(client.OpenStackApiException,
                                self.api.get_servers,
                                search_opts={'marker': marker})
        self.assertEqual(500, exp.response.status_code)
        self.assertIn('oslo_db.exception.DBError', six.text_type(exp))

    def test_get_servers_detail_marker_sorting(self):
        marker = self.up_cell_insts[1]
        # It will give the results from the up cell if the
        # list_records_by_skipping_down_cells config option is True.
        servers = self.api.get_servers(search_opts={'marker': marker,
                                                    'sort_key': "created_at",
                                                    'sort_dir': "asc"})
        # since there are 4 servers from the up cells, when giving the
        # second instance as marker, sorted by creation time in ascending
        # order, the third and fourth instances will be returned.
        self.assertEqual(2, len(servers))
        for server in servers:
            self.assertIn(
                server['id'], [self.up_cell_insts[2], self.up_cell_insts[3]])

    def test_get_servers_detail_non_admin_with_deleted_flag(self):
        # if the list_records_by_skipping_down_cells config option is True,
        # this deleted option should be ignored and the rest of the instances
        # from the up cells and the partial results from the down cells should
        # be returned.
        # Set the policy so we don't have permission to allow
        # all filters but are able to get server details.
        servers_rule = 'os_compute_api:servers:detail'
        extraspec_rule = 'os_compute_api:servers:allow_all_filters'
        self.policy.set_rules({
            extraspec_rule: 'rule:admin_api',
            servers_rule: '@'})
        servers = self.api.get_servers(search_opts={'deleted': True})
        # gets 4 results from up cells and 4 from down cells.
        self.assertEqual(8, len(servers))
        for server in servers:
            if "image" not in server:
                self.assertIn(server['id'], self.down_cell_insts)
            else:
                self.assertIn(server['id'], self.up_cell_insts)

    def test_get_servers_detail_filters(self):
        # We get the results only from the up cells, thus ignoring the down
        # cells if the list_records_by_skipping_down_cells config option is
        # True.
        api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
            api_version='v2.1'))
        self.admin_api = api_fixture.admin_api
        self.admin_api.microversion = '2.69'
        servers = self.admin_api.get_servers(
            search_opts={'hostname': "cell3-inst0"})
        self.assertEqual(1, len(servers))
        self.assertEqual(self.up_cell_insts[2], servers[0]['id'])

    def test_get_servers_detail_all_tenants_with_down_cells(self):
        api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
            api_version='v2.1'))
        self.admin_api = api_fixture.admin_api
        self.admin_api.microversion = '2.69'
        servers = self.admin_api.get_servers(search_opts={'all_tenants': True})
        # 4 servers from the up cells and 4 servers from the down cells
        # plus the 2 instances from cell1 and cell3 which are in a different
        # project.
        self.assertEqual(10, len(servers))
        for server in servers:
            if 'user_id' not in server:
                # server is in the down cell.
                self.assertEqual('UNKNOWN', server['status'])
                if server['tenant_id'] != 'faker':
                    self.assertIn(server['id'], self.down_cell_insts)
                # the partial construct will have only 5 keys:
                # created, tenant_id, status, id and links
                self.assertEqual(5, len(server))
            else:
                # server in up cell
                if server['tenant_id'] != 'faker':
                    self.assertIn(server['id'], self.up_cell_insts)
                    self.assertEqual(server['user_id'], self.project_id)
                self.assertIn('image', server)


class ServerRebuildTestCase(integrated_helpers._IntegratedTestBase,
                            integrated_helpers.InstanceHelperMixin):
    api_major_version = 'v2.1'
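A closing editorial note derived from the assertions above (not from Nova itself): at 2.69 a down-cell server shows up with three keys in ``GET /servers`` and five in ``GET /servers/detail``. A tiny helper, with invented names, to recognise such records:

MINIMAL_KEYS = {
    "GET /servers": {"id", "status", "links"},
    "GET /servers/detail": {"created", "id", "status", "tenant_id", "links"},
}


def is_minimal_construct(endpoint, record):
    """True when the record carries exactly the down-cell key set."""
    return set(record) == MINIMAL_KEYS[endpoint]


print(is_minimal_construct("GET /servers",
                           {"id": "x", "status": "UNKNOWN", "links": []}))  # True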