Merge "Implement export location metadata feature"
commit ea8276cd10
@@ -37,11 +37,15 @@
    "share:unmanage": "rule:admin_api",
    "share:force_delete": "rule:admin_api",
    "share:reset_status": "rule:admin_api",
    "share_export_location:index": "rule:default",
    "share_export_location:show": "rule:default",

    "share_instance:index": "rule:admin_api",
    "share_instance:show": "rule:admin_api",
    "share_instance:force_delete": "rule:admin_api",
    "share_instance:reset_status": "rule:admin_api",
    "share_instance_export_location:index": "rule:admin_api",
    "share_instance_export_location:show": "rule:admin_api",

    "share_snapshot:create_snapshot": "rule:default",
    "share_snapshot:delete_snapshot": "rule:default",
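The new policy keys above are the rules consulted for the export locations APIs added below; roughly, each controller's resource_name plus the decorated action name forms the rule that is looked up. A hypothetical illustration of that naming only, not Manila's actual policy plumbing:

# Hypothetical helper, shown only to illustrate how the rule names in
# policy.json line up with the controllers added later in this change.
def policy_rule(resource_name, action):
    return "%s:%s" % (resource_name, action)

assert policy_rule("share_export_location", "index") == \
    "share_export_location:index"
assert policy_rule("share_instance_export_location", "show") == \
    "share_instance_export_location:show"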
@@ -54,13 +54,14 @@ REST_API_VERSION_HISTORY = """
    * 2.6 - Return share_type UUID instead of name in Share API
    * 2.7 - Rename old extension-like API URLs to core-API-like
    * 2.8 - Attr "is_public" can be set for share using API "manage"
    * 2.9 - Add export locations API
"""

# The minimum and maximum versions of the API supported
# The default api version request is defined to be the
# the minimum version of the API supported.
_MIN_API_VERSION = "2.0"
_MAX_API_VERSION = "2.8"
_MAX_API_VERSION = "2.9"
DEFAULT_API_VERSION = _MIN_API_VERSION
@@ -69,3 +69,8 @@ user documentation.
2.8
---
Allow to set share visibility explicitly using "manage" API.

2.9
---
Add export locations API. Remove export locations from "shares" and
"share instances" APIs.
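Because these endpoints only exist from microversion 2.9 onwards, a client must opt in via the microversion header. A minimal sketch, assuming a reachable Manila endpoint, a valid token, and placeholder project/share IDs:

import requests

BASE = "http://controller:8786/v2/<project_id>"    # assumed endpoint
HEADERS = {
    "X-Auth-Token": "<token>",                      # assumed credential
    "X-OpenStack-Manila-API-Version": "2.9",        # request the 2.9 behaviour
}

# List export locations of a share.
resp = requests.get(BASE + "/shares/<share_id>/export_locations",
                    headers=HEADERS)
for el in resp.json()["export_locations"]:
    print(el["uuid"], el["path"])

# Show a single export location by its UUID.
uuid = resp.json()["export_locations"][0]["uuid"]
resp = requests.get(BASE + "/shares/<share_id>/export_locations/" + uuid,
                    headers=HEADERS)
print(resp.json()["export_location"])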
@@ -38,6 +38,8 @@ from manila.api.v2 import consistency_groups
from manila.api.v2 import quota_class_sets
from manila.api.v2 import quota_sets
from manila.api.v2 import services
from manila.api.v2 import share_export_locations
from manila.api.v2 import share_instance_export_locations
from manila.api.v2 import share_instances
from manila.api.v2 import share_types
from manila.api.v2 import shares
@@ -153,12 +155,43 @@ class APIRouter(manila.api.openstack.APIRouter):
                        collection={"detail": "GET"},
                        member={"action": "POST"})

        self.resources["share_instance_export_locations"] = (
            share_instance_export_locations.create_resource())
        mapper.connect("share_instances",
                       ("/{project_id}/share_instances/{share_instance_id}/"
                        "export_locations"),
                       controller=self.resources[
                           "share_instance_export_locations"],
                       action="index",
                       conditions={"method": ["GET"]})
        mapper.connect("share_instances",
                       ("/{project_id}/share_instances/{share_instance_id}/"
                        "export_locations/{export_location_uuid}"),
                       controller=self.resources[
                           "share_instance_export_locations"],
                       action="show",
                       conditions={"method": ["GET"]})

        mapper.connect("share_instance",
                       "/{project_id}/shares/{share_id}/instances",
                       controller=self.resources["share_instances"],
                       action="get_share_instances",
                       conditions={"method": ["GET"]})

        self.resources["share_export_locations"] = (
            share_export_locations.create_resource())
        mapper.connect("shares",
                       "/{project_id}/shares/{share_id}/export_locations",
                       controller=self.resources["share_export_locations"],
                       action="index",
                       conditions={"method": ["GET"]})
        mapper.connect("shares",
                       ("/{project_id}/shares/{share_id}/"
                        "export_locations/{export_location_uuid}"),
                       controller=self.resources["share_export_locations"],
                       action="show",
                       conditions={"method": ["GET"]})

        self.resources["snapshots"] = share_snapshots.create_resource()
        mapper.resource("snapshot", "snapshots",
                        controller=self.resources["snapshots"],
78 manila/api/v2/share_export_locations.py Normal file
@@ -0,0 +1,78 @@
# Copyright 2015 Mirantis inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from webob import exc

from manila.api.openstack import wsgi
from manila.api.views import export_locations as export_locations_views
from manila.db import api as db_api
from manila import exception
from manila.i18n import _


class ShareExportLocationController(wsgi.Controller):
    """The Share Export Locations API controller."""

    def __init__(self):
        self._view_builder_class = export_locations_views.ViewBuilder
        self.resource_name = 'share_export_location'
        super(self.__class__, self).__init__()

    def _verify_share(self, context, share_id):
        try:
            db_api.share_get(context, share_id)
        except exception.NotFound:
            msg = _("Share '%s' not found.") % share_id
            raise exc.HTTPNotFound(explanation=msg)

    @wsgi.Controller.api_version('2.9')
    @wsgi.Controller.authorize
    def index(self, req, share_id):
        """Return a list of export locations for share."""

        context = req.environ['manila.context']
        self._verify_share(context, share_id)
        if context.is_admin:
            export_locations = db_api.share_export_locations_get_by_share_id(
                context, share_id, include_admin_only=True)
            return self._view_builder.detail_list(export_locations)
        else:
            export_locations = db_api.share_export_locations_get_by_share_id(
                context, share_id, include_admin_only=False)
            return self._view_builder.summary_list(export_locations)

    @wsgi.Controller.api_version('2.9')
    @wsgi.Controller.authorize
    def show(self, req, share_id, export_location_uuid):
        """Return data about the requested export location."""
        context = req.environ['manila.context']
        self._verify_share(context, share_id)
        try:
            el = db_api.share_export_location_get_by_uuid(
                context, export_location_uuid)
        except exception.ExportLocationNotFound:
            msg = _("Export location '%s' not found.") % export_location_uuid
            raise exc.HTTPNotFound(explanation=msg)

        if context.is_admin:
            return self._view_builder.detail(el)
        else:
            if not el.is_admin_only:
                return self._view_builder.summary(el)
            raise exc.HTTPForbidden()


def create_resource():
    return wsgi.Resource(ShareExportLocationController())
67 manila/api/v2/share_instance_export_locations.py Normal file
@@ -0,0 +1,67 @@
|
||||
# Copyright 2015 Mirantis inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import six
|
||||
from webob import exc
|
||||
|
||||
from manila.api.openstack import wsgi
|
||||
from manila.api.views import export_locations as export_locations_views
|
||||
from manila.db import api as db_api
|
||||
from manila import exception
|
||||
from manila.i18n import _
|
||||
|
||||
|
||||
class ShareInstanceExportLocationController(wsgi.Controller):
|
||||
"""The Share Instance Export Locations API controller."""
|
||||
|
||||
def __init__(self):
|
||||
self._view_builder_class = export_locations_views.ViewBuilder
|
||||
self.resource_name = 'share_instance_export_location'
|
||||
super(self.__class__, self).__init__()
|
||||
|
||||
def _verify_share_instance(self, context, share_instance_id):
|
||||
try:
|
||||
db_api.share_instance_get(context, share_instance_id)
|
||||
except exception.NotFound:
|
||||
msg = _("Share instance '%s' not found.") % share_instance_id
|
||||
raise exc.HTTPNotFound(explanation=msg)
|
||||
|
||||
@wsgi.Controller.api_version('2.9')
|
||||
@wsgi.Controller.authorize
|
||||
def index(self, req, share_instance_id):
|
||||
"""Return a list of export locations for the share instance."""
|
||||
context = req.environ['manila.context']
|
||||
self._verify_share_instance(context, share_instance_id)
|
||||
export_locations = (
|
||||
db_api.share_export_locations_get_by_share_instance_id(
|
||||
context, share_instance_id))
|
||||
return self._view_builder.detail_list(export_locations)
|
||||
|
||||
@wsgi.Controller.api_version('2.9')
|
||||
@wsgi.Controller.authorize
|
||||
def show(self, req, share_instance_id, export_location_uuid):
|
||||
"""Return data about the requested export location."""
|
||||
context = req.environ['manila.context']
|
||||
self._verify_share_instance(context, share_instance_id)
|
||||
try:
|
||||
el = db_api.share_export_location_get_by_uuid(
|
||||
context, export_location_uuid)
|
||||
return self._view_builder.detail(el)
|
||||
except exception.ExportLocationNotFound as e:
|
||||
raise exc.HTTPNotFound(explanation=six.text_type(e))
|
||||
|
||||
|
||||
def create_resource():
|
||||
return wsgi.Resource(ShareInstanceExportLocationController())
|
71 manila/api/views/export_locations.py Normal file
@@ -0,0 +1,71 @@
|
||||
# Copyright (c) 2015 Mirantis Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from manila.api import common
|
||||
|
||||
|
||||
class ViewBuilder(common.ViewBuilder):
|
||||
"""Model export-locations API responses as a python dictionary."""
|
||||
|
||||
_collection_name = "export_locations"
|
||||
|
||||
def _get_export_location_view(self, export_location, detail=False):
|
||||
view = {
|
||||
'uuid': export_location['uuid'],
|
||||
'path': export_location['path'],
|
||||
'created_at': export_location['created_at'],
|
||||
'updated_at': export_location['updated_at'],
|
||||
}
|
||||
# TODO(vponomaryov): include metadata keys here as export location
|
||||
# attributes when such appear.
|
||||
#
|
||||
# Example having export_location['el_metadata'] as following:
|
||||
#
|
||||
# {'speed': '1Gbps', 'access': 'rw'}
|
||||
#
|
||||
# or
|
||||
#
|
||||
# {'speed': '100Mbps', 'access': 'ro'}
|
||||
#
|
||||
# view['speed'] = export_location['el_metadata'].get('speed')
|
||||
# view['access'] = export_location['el_metadata'].get('access')
|
||||
if detail:
|
||||
view['share_instance_id'] = export_location['share_instance_id']
|
||||
view['is_admin_only'] = export_location['is_admin_only']
|
||||
return {'export_location': view}
|
||||
|
||||
def summary(self, export_location):
|
||||
"""Summary view of a single export location."""
|
||||
return self._get_export_location_view(export_location, detail=False)
|
||||
|
||||
def detail(self, export_location):
|
||||
"""Detailed view of a single export location."""
|
||||
return self._get_export_location_view(export_location, detail=True)
|
||||
|
||||
def _list_export_locations(self, export_locations, detail=False):
|
||||
"""View of export locations list."""
|
||||
view_method = self.detail if detail else self.summary
|
||||
return {self._collection_name: [
|
||||
view_method(export_location)['export_location']
|
||||
for export_location in export_locations
|
||||
]}
|
||||
|
||||
def detail_list(self, export_locations):
|
||||
"""Detailed View of export locations list."""
|
||||
return self._list_export_locations(export_locations, detail=True)
|
||||
|
||||
def summary_list(self, export_locations):
|
||||
"""Summary View of export locations list."""
|
||||
return self._list_export_locations(export_locations, detail=False)
|
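For reference, a sketch of the two shapes this builder produces for a single export location (field values below are made up): the summary view carries only the common fields, the detail view adds the admin-only ones.

summary_view = {
    "export_location": {
        "uuid": "d6e3d6b2-fake-fake-fake-000000000000",
        "path": "10.0.0.1:/shares/share-fake",
        "created_at": "2015-12-01T10:00:00.000000",
        "updated_at": "2015-12-01T10:05:00.000000",
    }
}
detail_view = {
    "export_location": dict(
        summary_view["export_location"],
        share_instance_id="b0fa2e3c-fake-fake-fake-000000000000",
        is_admin_only=False,
    )
}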
@@ -18,6 +18,10 @@ class ViewBuilder(common.ViewBuilder):
|
||||
|
||||
_collection_name = 'share_instances'
|
||||
|
||||
_detail_version_modifiers = [
|
||||
"remove_export_locations",
|
||||
]
|
||||
|
||||
def detail_list(self, request, instances):
|
||||
"""Detailed view of a list of share instances."""
|
||||
return self._list_view(self.detail, request, instances)
|
||||
@@ -38,7 +42,8 @@ class ViewBuilder(common.ViewBuilder):
|
||||
'export_location': share_instance.get('export_location'),
|
||||
'export_locations': export_locations,
|
||||
}
|
||||
|
||||
self.update_versioned_resource_dict(
|
||||
request, instance_dict, share_instance)
|
||||
return {'share_instance': instance_dict}
|
||||
|
||||
def _list_view(self, func, request, instances):
|
||||
@@ -54,3 +59,8 @@ class ViewBuilder(common.ViewBuilder):
|
||||
instances_dict[self._collection_name] = instances_links
|
||||
|
||||
return instances_dict
|
||||
|
||||
@common.ViewBuilder.versioned_method("2.9")
|
||||
def remove_export_locations(self, share_instance_dict, share_instance):
|
||||
share_instance_dict.pop('export_location')
|
||||
share_instance_dict.pop('export_locations')
|
||||
|
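The practical effect of registering remove_export_locations as a "2.9" view modifier: share instance views built for microversion 2.9 or later drop both export location keys, while older microversions keep them. A toy illustration with a hypothetical dict:

instance_view_2_8 = {
    'id': 'fake-instance-id',
    'export_location': '/fake/path',
    'export_locations': ['/fake/path'],
}
# From 2.9 on, the modifier pops both keys before the response is returned:
instance_view_2_9 = dict(instance_view_2_8)
instance_view_2_9.pop('export_location')
instance_view_2_9.pop('export_locations')
# instance_view_2_9 == {'id': 'fake-instance-id'}; 2.9+ clients use the
# dedicated export locations endpoints instead.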
@@ -25,6 +25,7 @@ class ViewBuilder(common.ViewBuilder):
|
||||
"add_consistency_group_fields",
|
||||
"add_task_state_field",
|
||||
"modify_share_type_field",
|
||||
"remove_export_locations",
|
||||
]
|
||||
|
||||
def summary_list(self, request, shares):
|
||||
@@ -117,6 +118,11 @@ class ViewBuilder(common.ViewBuilder):
|
||||
'share_type': share_type,
|
||||
})
|
||||
|
||||
@common.ViewBuilder.versioned_method("2.9")
|
||||
def remove_export_locations(self, share_dict, share):
|
||||
share_dict.pop('export_location')
|
||||
share_dict.pop('export_locations')
|
||||
|
||||
def _list_view(self, func, request, shares):
|
||||
"""Provide a view for a list of shares."""
|
||||
shares_list = [func(request, share)['share'] for share in shares]
|
||||
|
@@ -597,20 +597,61 @@ def share_metadata_update(context, share, metadata, delete):
|
||||
|
||||
###################
|
||||
|
||||
def share_export_location_get_by_uuid(context, export_location_uuid):
|
||||
"""Get specific export location of a share."""
|
||||
return IMPL.share_export_location_get_by_uuid(
|
||||
context, export_location_uuid)
|
||||
|
||||
|
||||
def share_export_locations_get(context, share_id):
|
||||
"""Get all exports_locations of share."""
|
||||
"""Get all export locations of a share."""
|
||||
return IMPL.share_export_locations_get(context, share_id)
|
||||
|
||||
|
||||
def share_export_locations_get_by_share_id(context, share_id,
|
||||
include_admin_only=True):
|
||||
"""Get all export locations of a share by its ID."""
|
||||
return IMPL.share_export_locations_get_by_share_id(
|
||||
context, share_id, include_admin_only=include_admin_only)
|
||||
|
||||
|
||||
def share_export_locations_get_by_share_instance_id(context,
|
||||
share_instance_id):
|
||||
"""Get all export locations of a share instance by its ID."""
|
||||
return IMPL.share_export_locations_get_by_share_instance_id(
|
||||
context, share_instance_id)
|
||||
|
||||
|
||||
def share_export_locations_update(context, share_instance_id, export_locations,
|
||||
delete=True):
|
||||
"""Update export locations of share."""
|
||||
return IMPL.share_export_locations_update(context, share_instance_id,
|
||||
export_locations, delete)
|
||||
"""Update export locations of a share instance."""
|
||||
return IMPL.share_export_locations_update(
|
||||
context, share_instance_id, export_locations, delete)
|
||||
|
||||
|
||||
####################
|
||||
|
||||
def export_location_metadata_get(context, export_location_uuid, session=None):
|
||||
"""Get all metadata of an export location."""
|
||||
return IMPL.export_location_metadata_get(
|
||||
context, export_location_uuid, session=session)
|
||||
|
||||
|
||||
def export_location_metadata_delete(context, export_location_uuid, keys,
|
||||
session=None):
|
||||
"""Delete metadata of an export location."""
|
||||
return IMPL.export_location_metadata_delete(
|
||||
context, export_location_uuid, keys, session=session)
|
||||
|
||||
|
||||
def export_location_metadata_update(context, export_location_uuid, metadata,
|
||||
delete, session=None):
|
||||
"""Update metadata of an export location."""
|
||||
return IMPL.export_location_metadata_update(
|
||||
context, export_location_uuid, metadata, delete, session=session)
|
||||
|
||||
####################
|
||||
|
||||
|
||||
def share_network_create(context, values):
|
||||
"""Create a share network DB record."""
|
||||
|
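A small usage sketch of the export location metadata wrappers introduced above (the export location UUID is hypothetical); like the other functions in this module they simply dispatch to IMPL:

from manila import context as manila_context
from manila.db import api as db_api

ctxt = manila_context.get_admin_context()
el_uuid = 'fake-export-location-uuid'

db_api.export_location_metadata_update(
    ctxt, el_uuid, {'speed': '1Gbps', 'access': 'rw'}, delete=False)
print(db_api.export_location_metadata_get(ctxt, el_uuid))
# {'speed': '1Gbps', 'access': 'rw'}
db_api.export_location_metadata_delete(ctxt, el_uuid, keys=['access'])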
@@ -0,0 +1,120 @@
|
||||
# Copyright 2015 Mirantis Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Add DB support for share instance export locations metadata.
|
||||
|
||||
Revision ID: dda6de06349
|
||||
Revises: 323840a08dc4
|
||||
Create Date: 2015-11-30 13:50:15.914232
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'dda6de06349'
|
||||
down_revision = '323840a08dc4'
|
||||
|
||||
from alembic import op
|
||||
from oslo_log import log
|
||||
from oslo_utils import uuidutils
|
||||
import sqlalchemy as sa
|
||||
|
||||
from manila.i18n import _LE
|
||||
|
||||
SI_TABLE_NAME = 'share_instances'
|
||||
EL_TABLE_NAME = 'share_instance_export_locations'
|
||||
ELM_TABLE_NAME = 'share_instance_export_locations_metadata'
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
def upgrade():
|
||||
try:
|
||||
meta = sa.MetaData()
|
||||
meta.bind = op.get_bind()
|
||||
|
||||
# Add new 'is_admin_only' column in export locations table that will be
|
||||
# used for hiding admin export locations from common users in API.
|
||||
op.add_column(
|
||||
EL_TABLE_NAME,
|
||||
sa.Column('is_admin_only', sa.Boolean, default=False))
|
||||
|
||||
# Create new 'uuid' column as String(36) in export locations table
|
||||
# that will be used for API.
|
||||
op.add_column(
|
||||
EL_TABLE_NAME,
|
||||
sa.Column('uuid', sa.String(36), unique=True),
|
||||
)
|
||||
|
||||
# Generate UUID for each existing export location.
|
||||
el_table = sa.Table(
|
||||
EL_TABLE_NAME, meta,
|
||||
sa.Column('id', sa.Integer),
|
||||
sa.Column('uuid', sa.String(36)),
|
||||
sa.Column('is_admin_only', sa.Boolean),
|
||||
)
|
||||
for record in el_table.select().execute():
|
||||
el_table.update().values(
|
||||
is_admin_only=False,
|
||||
uuid=uuidutils.generate_uuid(),
|
||||
).where(
|
||||
el_table.c.id == record.id,
|
||||
).execute()
|
||||
|
||||
# Make new 'uuid' column in export locations table not nullable.
|
||||
op.alter_column(
|
||||
EL_TABLE_NAME,
|
||||
'uuid',
|
||||
existing_type=sa.String(length=36),
|
||||
nullable=False,
|
||||
)
|
||||
except Exception:
|
||||
LOG.error(_LE("Failed to update '%s' table!"),
|
||||
EL_TABLE_NAME)
|
||||
raise
|
||||
|
||||
try:
|
||||
op.create_table(
|
||||
ELM_TABLE_NAME,
|
||||
sa.Column('id', sa.Integer, primary_key=True),
|
||||
sa.Column('created_at', sa.DateTime),
|
||||
sa.Column('updated_at', sa.DateTime),
|
||||
sa.Column('deleted_at', sa.DateTime),
|
||||
sa.Column('deleted', sa.Integer),
|
||||
sa.Column('export_location_id', sa.Integer,
|
||||
sa.ForeignKey('%s.id' % EL_TABLE_NAME,
|
||||
name="elm_id_fk"), nullable=False),
|
||||
sa.Column('key', sa.String(length=255), nullable=False),
|
||||
sa.Column('value', sa.String(length=1023), nullable=False),
|
||||
sa.UniqueConstraint('export_location_id', 'key', 'deleted',
|
||||
name="elm_el_id_uc"),
|
||||
mysql_engine='InnoDB',
|
||||
)
|
||||
except Exception:
|
||||
LOG.error(_LE("Failed to create '%s' table!"), ELM_TABLE_NAME)
|
||||
raise
|
||||
|
||||
|
||||
def downgrade():
|
||||
try:
|
||||
op.drop_table(ELM_TABLE_NAME)
|
||||
except Exception:
|
||||
LOG.error(_LE("Failed to drop '%s' table!"), ELM_TABLE_NAME)
|
||||
raise
|
||||
|
||||
try:
|
||||
op.drop_column(EL_TABLE_NAME, 'is_admin_only')
|
||||
op.drop_column(EL_TABLE_NAME, 'uuid')
|
||||
except Exception:
|
||||
LOG.error(_LE("Failed to update '%s' table!"), EL_TABLE_NAME)
|
||||
raise
|
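Deployments would normally pick this revision up through the standard Manila DB sync; purely as a sketch, and assuming the Alembic config lives at the path below, the equivalent programmatic call would be:

from alembic import command
from alembic.config import Config

cfg = Config('manila/db/migrations/alembic.ini')  # assumed config location
command.upgrade(cfg, 'dda6de06349')               # revision defined above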
@@ -179,6 +179,20 @@ def require_share_exists(f):
|
||||
return wrapper
|
||||
|
||||
|
||||
def require_share_instance_exists(f):
|
||||
"""Decorator to require the specified share instance to exist.
|
||||
|
||||
Requires the wrapped function to use context and share_instance_id as
|
||||
their first two arguments.
|
||||
"""
|
||||
|
||||
def wrapper(context, share_instance_id, *args, **kwargs):
|
||||
share_instance_get(context, share_instance_id)
|
||||
return f(context, share_instance_id, *args, **kwargs)
|
||||
wrapper.__name__ = f.__name__
|
||||
return wrapper
|
||||
|
||||
|
||||
def model_query(context, model, *args, **kwargs):
|
||||
"""Query helper that accounts for context's `read_deleted` field.
|
||||
|
||||
@@ -1171,10 +1185,13 @@ def share_instance_get(context, share_instance_id, session=None,
|
||||
with_share_data=False):
|
||||
if session is None:
|
||||
session = get_session()
|
||||
result = (
|
||||
model_query(context, models.ShareInstance, session=session).filter_by(
|
||||
id=share_instance_id).first()
|
||||
)
|
||||
result = model_query(
|
||||
context, models.ShareInstance, session=session,
|
||||
).filter_by(
|
||||
id=share_instance_id,
|
||||
).options(
|
||||
joinedload('export_locations'),
|
||||
).first()
|
||||
if result is None:
|
||||
raise exception.NotFound()
|
||||
|
||||
@@ -1188,10 +1205,11 @@ def share_instance_get(context, share_instance_id, session=None,
|
||||
@require_admin_context
|
||||
def share_instances_get_all(context):
|
||||
session = get_session()
|
||||
return (
|
||||
model_query(context, models.ShareInstance, session=session,
|
||||
read_deleted="no").all()
|
||||
)
|
||||
return model_query(
|
||||
context, models.ShareInstance, session=session, read_deleted="no",
|
||||
).options(
|
||||
joinedload('export_locations'),
|
||||
).all()
|
||||
|
||||
|
||||
@require_context
|
||||
@@ -1200,15 +1218,11 @@ def share_instance_delete(context, instance_id, session=None):
|
||||
session = get_session()
|
||||
|
||||
with session.begin():
|
||||
share_export_locations_update(context, instance_id, [], delete=True)
|
||||
instance_ref = share_instance_get(context, instance_id,
|
||||
session=session)
|
||||
instance_ref.soft_delete(session=session, update_status=True)
|
||||
|
||||
session.query(models.ShareInstanceExportLocations).filter_by(
|
||||
share_instance_id=instance_id).soft_delete()
|
||||
|
||||
share = share_get(context, instance_ref['share_id'], session=session)
|
||||
|
||||
if len(share.instances) == 0:
|
||||
share.soft_delete(session=session)
|
||||
share_access_delete_all_by_share(context, share['id'])
|
||||
@@ -2019,29 +2033,90 @@ def _share_metadata_get_item(context, share_id, key, session=None):
|
||||
return result
|
||||
|
||||
|
||||
#################################
|
||||
############################
|
||||
# Export locations functions
|
||||
############################
|
||||
|
||||
def _share_export_locations_get(context, share_instance_ids,
|
||||
include_admin_only=True, session=None):
|
||||
session = session or get_session()
|
||||
|
||||
if not isinstance(share_instance_ids, (set, list, tuple)):
|
||||
share_instance_ids = (share_instance_ids, )
|
||||
|
||||
query = model_query(
|
||||
context,
|
||||
models.ShareInstanceExportLocations,
|
||||
session=session,
|
||||
read_deleted="no",
|
||||
).filter(
|
||||
models.ShareInstanceExportLocations.share_instance_id.in_(
|
||||
share_instance_ids),
|
||||
).order_by(
|
||||
"updated_at",
|
||||
).options(
|
||||
joinedload("_el_metadata_bare"),
|
||||
)
|
||||
|
||||
if not include_admin_only:
|
||||
query = query.filter_by(is_admin_only=False)
|
||||
return query.all()
|
||||
|
||||
|
||||
@require_context
|
||||
@require_share_exists
|
||||
def share_export_locations_get_by_share_id(context, share_id,
|
||||
include_admin_only=True):
|
||||
share = share_get(context, share_id)
|
||||
ids = [instance.id for instance in share.instances]
|
||||
rows = _share_export_locations_get(
|
||||
context, ids, include_admin_only=include_admin_only)
|
||||
return rows
|
||||
|
||||
|
||||
@require_context
|
||||
@require_share_instance_exists
|
||||
def share_export_locations_get_by_share_instance_id(context,
|
||||
share_instance_id):
|
||||
rows = _share_export_locations_get(
|
||||
context, [share_instance_id], include_admin_only=True)
|
||||
return rows
|
||||
|
||||
|
||||
@require_context
|
||||
@require_share_exists
|
||||
def share_export_locations_get(context, share_id):
|
||||
# NOTE(vponomaryov): this method is kept for compatibility with
|
||||
# old approach. New one uses 'share_export_locations_get_by_share_id'.
|
||||
# Which returns list of dicts instead of list of strings, as this one does.
|
||||
share = share_get(context, share_id)
|
||||
rows = _share_export_locations_get(context, share.instance.id)
|
||||
rows = _share_export_locations_get(
|
||||
context, share.instance.id, context.is_admin)
|
||||
|
||||
return [location['path'] for location in rows]
|
||||
|
||||
|
||||
def _share_export_locations_get(context, share_instance_id, session=None):
|
||||
if not session:
|
||||
session = get_session()
|
||||
@require_context
|
||||
def share_export_location_get_by_uuid(context, export_location_uuid,
|
||||
session=None):
|
||||
session = session or get_session()
|
||||
|
||||
return (
|
||||
model_query(context, models.ShareInstanceExportLocations,
|
||||
session=session, read_deleted="no").
|
||||
filter_by(share_instance_id=share_instance_id).
|
||||
order_by("updated_at").
|
||||
all()
|
||||
query = model_query(
|
||||
context,
|
||||
models.ShareInstanceExportLocations,
|
||||
session=session,
|
||||
read_deleted="no",
|
||||
).filter_by(
|
||||
uuid=export_location_uuid,
|
||||
).options(
|
||||
joinedload("_el_metadata_bare"),
|
||||
)
|
||||
|
||||
result = query.first()
|
||||
if not result:
|
||||
raise exception.ExportLocationNotFound(uuid=export_location_uuid)
|
||||
return result
|
||||
|
||||
|
||||
@require_context
|
||||
@oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
|
||||
@@ -2049,67 +2124,177 @@ def share_export_locations_update(context, share_instance_id, export_locations,
|
||||
delete):
|
||||
# NOTE(u_glide):
|
||||
# Backward compatibility code for drivers,
|
||||
# which returns single export_location as string
|
||||
if not isinstance(export_locations, list):
|
||||
export_locations = [export_locations]
|
||||
# which return single export_location as string
|
||||
if not isinstance(export_locations, (list, tuple, set)):
|
||||
export_locations = (export_locations, )
|
||||
export_locations_as_dicts = []
|
||||
for el in export_locations:
|
||||
# NOTE(vponomaryov): transform old export locations view to new one
|
||||
export_location = el
|
||||
if isinstance(el, six.string_types):
|
||||
export_location = {
|
||||
"path": el,
|
||||
"is_admin_only": False,
|
||||
"metadata": {},
|
||||
}
|
||||
elif isinstance(export_location, dict):
|
||||
if 'metadata' not in export_location:
|
||||
export_location['metadata'] = {}
|
||||
else:
|
||||
raise exception.ManilaException(
|
||||
_("Wrong export location type '%s'.") % type(export_location))
|
||||
export_locations_as_dicts.append(export_location)
|
||||
export_locations = export_locations_as_dicts
|
||||
|
||||
export_locations_paths = [el['path'] for el in export_locations]
|
||||
|
||||
session = get_session()
|
||||
|
||||
with session.begin():
|
||||
location_rows = _share_export_locations_get(
|
||||
context, share_instance_id, session=session)
|
||||
current_el_rows = _share_export_locations_get(
|
||||
context, share_instance_id, session=session)
|
||||
|
||||
def get_path_list_from_rows(rows):
|
||||
return set([l['path'] for l in rows])
|
||||
def get_path_list_from_rows(rows):
|
||||
return set([l['path'] for l in rows])
|
||||
|
||||
current_locations = get_path_list_from_rows(location_rows)
|
||||
current_el_paths = get_path_list_from_rows(current_el_rows)
|
||||
|
||||
def create_indexed_time_dict(key_list):
|
||||
base = timeutils.utcnow()
|
||||
return {
|
||||
# NOTE(u_glide): Incrementing timestamp by microseconds to make
|
||||
# timestamp order match index order.
|
||||
key: base + datetime.timedelta(microseconds=index)
|
||||
for index, key in enumerate(key_list)
|
||||
}
|
||||
def create_indexed_time_dict(key_list):
|
||||
base = timeutils.utcnow()
|
||||
return {
|
||||
# NOTE(u_glide): Incrementing timestamp by microseconds to make
|
||||
# timestamp order match index order.
|
||||
key: base + datetime.timedelta(microseconds=index)
|
||||
for index, key in enumerate(key_list)
|
||||
}
|
||||
|
||||
indexed_update_time = create_indexed_time_dict(export_locations)
|
||||
indexed_update_time = create_indexed_time_dict(export_locations_paths)
|
||||
|
||||
for location in location_rows:
|
||||
if delete and location['path'] not in export_locations:
|
||||
location.soft_delete(session)
|
||||
else:
|
||||
updated_at = indexed_update_time[location['path']]
|
||||
location.update({
|
||||
'updated_at': updated_at,
|
||||
'deleted': 0,
|
||||
})
|
||||
|
||||
location.save(session=session)
|
||||
|
||||
# Now add new export locations
|
||||
for path in export_locations:
|
||||
if path in current_locations:
|
||||
# Already updated
|
||||
continue
|
||||
|
||||
location_ref = models.ShareInstanceExportLocations()
|
||||
location_ref.update({
|
||||
'path': path,
|
||||
'share_instance_id': share_instance_id,
|
||||
'updated_at': indexed_update_time[path],
|
||||
for el in current_el_rows:
|
||||
if delete and el['path'] not in export_locations_paths:
|
||||
export_location_metadata_delete(context, el['uuid'])
|
||||
el.soft_delete(session)
|
||||
else:
|
||||
updated_at = indexed_update_time[el['path']]
|
||||
el.update({
|
||||
'updated_at': updated_at,
|
||||
'deleted': 0,
|
||||
})
|
||||
location_ref.save(session=session)
|
||||
el.save(session=session)
|
||||
if el['el_metadata']:
|
||||
export_location_metadata_update(
|
||||
context, el['uuid'], el['el_metadata'], session=session)
|
||||
|
||||
if delete:
|
||||
return export_locations
|
||||
# Now add new export locations
|
||||
for el in export_locations:
|
||||
if el['path'] in current_el_paths:
|
||||
# Already updated
|
||||
continue
|
||||
|
||||
return get_path_list_from_rows(_share_export_locations_get(
|
||||
context, share_instance_id, session=session))
|
||||
location_ref = models.ShareInstanceExportLocations()
|
||||
location_ref.update({
|
||||
'uuid': uuidutils.generate_uuid(),
|
||||
'path': el['path'],
|
||||
'share_instance_id': share_instance_id,
|
||||
'updated_at': indexed_update_time[el['path']],
|
||||
'deleted': 0,
|
||||
'is_admin_only': el.get('is_admin_only', False),
|
||||
})
|
||||
location_ref.save(session=session)
|
||||
if not el.get('metadata'):
|
||||
continue
|
||||
export_location_metadata_update(
|
||||
context, location_ref['uuid'], el.get('metadata'), session=session)
|
||||
|
||||
return get_path_list_from_rows(_share_export_locations_get(
|
||||
context, share_instance_id, session=session))
|
||||
|
||||
|
||||
#################################
|
||||
#####################################
|
||||
# Export locations metadata functions
|
||||
#####################################
|
||||
|
||||
def _export_location_metadata_get_query(context, export_location_uuid,
|
||||
session=None):
|
||||
session = session or get_session()
|
||||
export_location_id = share_export_location_get_by_uuid(
|
||||
context, export_location_uuid).id
|
||||
|
||||
return model_query(
|
||||
context, models.ShareInstanceExportLocationsMetadata, session=session,
|
||||
read_deleted="no",
|
||||
).filter_by(
|
||||
export_location_id=export_location_id,
|
||||
)
|
||||
|
||||
|
||||
@require_context
|
||||
def export_location_metadata_get(context, export_location_uuid, session=None):
|
||||
rows = _export_location_metadata_get_query(
|
||||
context, export_location_uuid, session=session).all()
|
||||
result = {}
|
||||
for row in rows:
|
||||
result[row["key"]] = row["value"]
|
||||
return result
|
||||
|
||||
|
||||
@require_context
|
||||
def export_location_metadata_delete(context, export_location_uuid, keys=None):
|
||||
session = get_session()
|
||||
metadata = _export_location_metadata_get_query(
|
||||
context, export_location_uuid, session=session,
|
||||
)
|
||||
# NOTE(vponomaryov): if keys is None then we delete all metadata.
|
||||
if keys is not None:
|
||||
keys = keys if isinstance(keys, (list, set, tuple)) else (keys, )
|
||||
metadata = metadata.filter(
|
||||
models.ShareInstanceExportLocationsMetadata.key.in_(keys))
|
||||
metadata = metadata.all()
|
||||
for meta_ref in metadata:
|
||||
meta_ref.soft_delete(session=session)
|
||||
|
||||
|
||||
@require_context
|
||||
@oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
|
||||
def export_location_metadata_update(context, export_location_uuid, metadata,
|
||||
delete=False, session=None):
|
||||
session = session or get_session()
|
||||
if delete:
|
||||
original_metadata = export_location_metadata_get(
|
||||
context, export_location_uuid, session=session)
|
||||
keys_for_deletion = set(original_metadata).difference(metadata)
|
||||
if keys_for_deletion:
|
||||
export_location_metadata_delete(
|
||||
context, export_location_uuid, keys=keys_for_deletion)
|
||||
|
||||
el = share_export_location_get_by_uuid(context, export_location_uuid)
|
||||
for meta_key, meta_value in metadata.items():
|
||||
# NOTE(vponomaryov): we should use separate session
|
||||
# for each meta_ref because of autoincrement of integer primary key
|
||||
# that will not take effect using one session and we will rewrite,
|
||||
# in that case, single record - first one added with this call.
|
||||
session = get_session()
|
||||
item = {"value": meta_value, "updated_at": timeutils.utcnow()}
|
||||
|
||||
meta_ref = _export_location_metadata_get_query(
|
||||
context, export_location_uuid, session=session,
|
||||
).filter_by(
|
||||
key=meta_key,
|
||||
).first()
|
||||
|
||||
if not meta_ref:
|
||||
meta_ref = models.ShareInstanceExportLocationsMetadata()
|
||||
item.update({
|
||||
"key": meta_key,
|
||||
"export_location_id": el.id,
|
||||
})
|
||||
|
||||
meta_ref.update(item)
|
||||
meta_ref.save(session=session)
|
||||
|
||||
return metadata
|
||||
|
||||
|
||||
###################################
|
||||
|
||||
|
||||
@require_context
|
||||
|
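The rewritten share_export_locations_update() above keeps accepting the old driver format; a short sketch of the two accepted inputs (paths and metadata are made up) and of how a bare string is normalized before being stored:

old_style = '10.0.0.1:/shares/share-fake'        # legacy drivers: plain path
new_style = {                                    # updated drivers: dict form
    'path': '10.0.0.1:/shares/share-fake',
    'is_admin_only': False,
    'metadata': {'speed': '1Gbps'},
}
# A bare string gets wrapped roughly like this before hitting the DB:
normalized = {'path': old_style, 'is_admin_only': False, 'metadata': {}}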
@@ -363,13 +363,52 @@ class ShareInstance(BASE, ManilaBase):
|
||||
|
||||
|
||||
class ShareInstanceExportLocations(BASE, ManilaBase):
|
||||
"""Represents export locations of shares."""
|
||||
"""Represents export locations of share instances."""
|
||||
__tablename__ = 'share_instance_export_locations'
|
||||
|
||||
_extra_keys = ['el_metadata', ]
|
||||
|
||||
@property
|
||||
def el_metadata(self):
|
||||
el_metadata = {}
|
||||
for meta in self._el_metadata_bare: # pylint: disable=E1101
|
||||
el_metadata[meta['key']] = meta['value']
|
||||
return el_metadata
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
uuid = Column(String(36), nullable=False, unique=True)
|
||||
share_instance_id = Column(
|
||||
String(36), ForeignKey('share_instances.id'), nullable=False)
|
||||
path = Column(String(2000))
|
||||
is_admin_only = Column(Boolean, default=False, nullable=False)
|
||||
|
||||
|
||||
class ShareInstanceExportLocationsMetadata(BASE, ManilaBase):
|
||||
"""Represents export location metadata of share instances."""
|
||||
__tablename__ = "share_instance_export_locations_metadata"
|
||||
|
||||
_extra_keys = ['export_location_uuid', ]
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
export_location_id = Column(
|
||||
Integer,
|
||||
ForeignKey("share_instance_export_locations.id"), nullable=False)
|
||||
key = Column(String(255), nullable=False)
|
||||
value = Column(String(1023), nullable=False)
|
||||
export_location = orm.relationship(
|
||||
ShareInstanceExportLocations,
|
||||
backref="_el_metadata_bare",
|
||||
foreign_keys=export_location_id,
|
||||
lazy='immediate',
|
||||
primaryjoin="and_("
|
||||
"%(cls_name)s.export_location_id == "
|
||||
"ShareInstanceExportLocations.id,"
|
||||
"%(cls_name)s.deleted == 0)" % {
|
||||
"cls_name": "ShareInstanceExportLocationsMetadata"})
|
||||
|
||||
@property
|
||||
def export_location_uuid(self):
|
||||
return self.export_location.uuid # pylint: disable=E1101
|
||||
|
||||
|
||||
class ShareTypes(BASE, ManilaBase):
|
||||
|
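The el_metadata property defined above simply folds the related metadata rows into a flat dict; with hypothetical rows it behaves like this:

rows = [
    {'key': 'speed', 'value': '1Gbps'},
    {'key': 'access', 'value': 'rw'},
]
el_metadata = {}
for meta in rows:
    el_metadata[meta['key']] = meta['value']
# el_metadata == {'speed': '1Gbps', 'access': 'rw'}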
@@ -422,6 +422,10 @@ class ShareBackendException(ManilaException):
|
||||
message = _("Share backend error: %(msg)s.")
|
||||
|
||||
|
||||
class ExportLocationNotFound(NotFound):
|
||||
message = _("Export location %(uuid)s could not be found.")
|
||||
|
||||
|
||||
class ShareSnapshotNotFound(NotFound):
|
||||
message = _("Snapshot %(snapshot_id)s could not be found.")
|
||||
|
||||
|
@@ -289,10 +289,29 @@ class API(base.Base):
|
||||
# NOTE(ameade): Do not cast to driver if creating from cgsnapshot
|
||||
return
|
||||
|
||||
share_dict = share.to_dict()
|
||||
share_dict.update(
|
||||
{'metadata': self.db.share_metadata_get(context, share['id'])}
|
||||
)
|
||||
share_properties = {
|
||||
'size': share['size'],
|
||||
'user_id': share['user_id'],
|
||||
'project_id': share['project_id'],
|
||||
'metadata': self.db.share_metadata_get(context, share['id']),
|
||||
'share_server_id': share['share_server_id'],
|
||||
'snapshot_support': share['snapshot_support'],
|
||||
'share_proto': share['share_proto'],
|
||||
'share_type_id': share['share_type_id'],
|
||||
'is_public': share['is_public'],
|
||||
'consistency_group_id': share['consistency_group_id'],
|
||||
'source_cgsnapshot_member_id': share[
|
||||
'source_cgsnapshot_member_id'],
|
||||
'snapshot_id': share['snapshot_id'],
|
||||
}
|
||||
share_instance_properties = {
|
||||
'availability_zone_id': share_instance['availability_zone_id'],
|
||||
'share_network_id': share_instance['share_network_id'],
|
||||
'share_server_id': share_instance['share_server_id'],
|
||||
'share_id': share_instance['share_id'],
|
||||
'host': share_instance['host'],
|
||||
'status': share_instance['status'],
|
||||
}
|
||||
|
||||
share_type = None
|
||||
if share['share_type_id']:
|
||||
@@ -300,8 +319,8 @@ class API(base.Base):
|
||||
context, share['share_type_id'])
|
||||
|
||||
request_spec = {
|
||||
'share_properties': share_dict,
|
||||
'share_instance_properties': share_instance.to_dict(),
|
||||
'share_properties': share_properties,
|
||||
'share_instance_properties': share_instance_properties,
|
||||
'share_proto': share['share_proto'],
|
||||
'share_id': share['id'],
|
||||
'snapshot_id': share['snapshot_id'],
|
||||
@@ -599,8 +618,31 @@ class API(base.Base):
|
||||
share_type_id = share['share_type_id']
|
||||
if share_type_id:
|
||||
share_type = share_types.get_share_type(context, share_type_id)
|
||||
request_spec = {'share_properties': share,
|
||||
'share_instance_properties': share_instance.to_dict(),
|
||||
|
||||
share_properties = {
|
||||
'size': share['size'],
|
||||
'user_id': share['user_id'],
|
||||
'project_id': share['project_id'],
|
||||
'share_server_id': share['share_server_id'],
|
||||
'snapshot_support': share['snapshot_support'],
|
||||
'share_proto': share['share_proto'],
|
||||
'share_type_id': share['share_type_id'],
|
||||
'is_public': share['is_public'],
|
||||
'consistency_group_id': share['consistency_group_id'],
|
||||
'source_cgsnapshot_member_id': share[
|
||||
'source_cgsnapshot_member_id'],
|
||||
'snapshot_id': share['snapshot_id'],
|
||||
}
|
||||
share_instance_properties = {
|
||||
'availability_zone_id': share_instance['availability_zone_id'],
|
||||
'share_network_id': share_instance['share_network_id'],
|
||||
'share_server_id': share_instance['share_server_id'],
|
||||
'share_id': share_instance['share_id'],
|
||||
'host': share_instance['host'],
|
||||
'status': share_instance['status'],
|
||||
}
|
||||
request_spec = {'share_properties': share_properties,
|
||||
'share_instance_properties': share_instance_properties,
|
||||
'share_type': share_type,
|
||||
'share_id': share['id']}
|
||||
|
||||
|
@@ -242,7 +242,15 @@ class GenericShareDriver(driver.ExecuteMixin, driver.ShareDriver):
|
||||
location = helper.create_export(
|
||||
server_details,
|
||||
share['name'])
|
||||
return location
|
||||
return {
|
||||
"path": location,
|
||||
"is_admin_only": False,
|
||||
"metadata": {
|
||||
# TODO(vponomaryov): remove this fake metadata when proper
|
||||
# appears.
|
||||
"export_location_metadata_example": "example",
|
||||
},
|
||||
}
|
||||
|
||||
def _format_device(self, server_details, volume):
|
||||
"""Formats device attached to the service vm."""
|
||||
|
@@ -573,6 +573,13 @@ class ShareManager(manager.SchedulerDependentManager):
|
||||
|
||||
share_server = self._get_share_server(ctxt.elevated(),
|
||||
share_instance)
|
||||
share_server = {
|
||||
'id': share_server['id'],
|
||||
'share_network_id': share_server['share_network_id'],
|
||||
'host': share_server['host'],
|
||||
'status': share_server['status'],
|
||||
'backend_details': share_server['backend_details'],
|
||||
} if share_server else share_server
|
||||
|
||||
dest_driver_migration_info = rpcapi.get_driver_migration_info(
|
||||
ctxt, share_instance, share_server)
|
||||
@@ -663,6 +670,13 @@ class ShareManager(manager.SchedulerDependentManager):
|
||||
share_instance)
|
||||
new_share_server = self._get_share_server(context.elevated(),
|
||||
new_share_instance)
|
||||
new_share_server = {
|
||||
'id': new_share_server['id'],
|
||||
'share_network_id': new_share_server['share_network_id'],
|
||||
'host': new_share_server['host'],
|
||||
'status': new_share_server['status'],
|
||||
'backend_details': new_share_server['backend_details'],
|
||||
} if new_share_server else new_share_server
|
||||
|
||||
src_migration_info = self.driver.get_migration_info(
|
||||
context, share_instance, share_server)
|
||||
|
152 manila/tests/api/v2/test_share_export_locations.py Normal file
@@ -0,0 +1,152 @@
|
||||
# Copyright (c) 2015 Mirantis Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import ddt
|
||||
import mock
|
||||
from webob import exc
|
||||
|
||||
from manila.api.v2 import share_export_locations as export_locations
|
||||
from manila import context
|
||||
from manila import db
|
||||
from manila import exception
|
||||
from manila import policy
|
||||
from manila import test
|
||||
from manila.tests.api import fakes
|
||||
from manila.tests import db_utils
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class ShareExportLocationsAPITest(test.TestCase):
|
||||
|
||||
def _get_request(self, version="2.9", use_admin_context=True):
|
||||
req = fakes.HTTPRequest.blank(
|
||||
'/v2/shares/%s/export_locations' % self.share_instance_id,
|
||||
version=version, use_admin_context=use_admin_context)
|
||||
return req
|
||||
|
||||
def setUp(self):
|
||||
super(self.__class__, self).setUp()
|
||||
self.controller = (
|
||||
export_locations.ShareExportLocationController())
|
||||
self.resource_name = self.controller.resource_name
|
||||
self.ctxt = {
|
||||
'admin': context.RequestContext('admin', 'fake', True),
|
||||
'user': context.RequestContext('fake', 'fake'),
|
||||
}
|
||||
self.mock_policy_check = self.mock_object(
|
||||
policy, 'check_policy', mock.Mock(return_value=True))
|
||||
self.share = db_utils.create_share()
|
||||
self.share_instance_id = self.share.instance.id
|
||||
self.req = self._get_request()
|
||||
paths = ['fake1/1/', 'fake2/2', 'fake3/3']
|
||||
db.share_export_locations_update(
|
||||
self.ctxt['admin'], self.share_instance_id, paths, False)
|
||||
|
||||
@ddt.data('admin', 'user')
|
||||
def test_list_and_show(self, role):
|
||||
req = self._get_request(use_admin_context=(role == 'admin'))
|
||||
index_result = self.controller.index(req, self.share['id'])
|
||||
|
||||
self.assertIn('export_locations', index_result)
|
||||
self.assertEqual(1, len(index_result))
|
||||
self.assertEqual(3, len(index_result['export_locations']))
|
||||
|
||||
for index_el in index_result['export_locations']:
|
||||
self.assertIn('uuid', index_el)
|
||||
show_result = self.controller.show(
|
||||
req, self.share['id'], index_el['uuid'])
|
||||
self.assertIn('export_location', show_result)
|
||||
self.assertEqual(1, len(show_result))
|
||||
expected_keys = [
|
||||
'created_at', 'updated_at', 'uuid', 'path',
|
||||
]
|
||||
if role == 'admin':
|
||||
expected_keys.extend(['share_instance_id', 'is_admin_only'])
|
||||
for el in (index_el, show_result['export_location']):
|
||||
self.assertEqual(len(expected_keys), len(el))
|
||||
for key in expected_keys:
|
||||
self.assertIn(key, el)
|
||||
|
||||
for key in expected_keys:
|
||||
self.assertEqual(
|
||||
index_el[key], show_result['export_location'][key])
|
||||
|
||||
def test_list_export_locations_share_not_found(self):
|
||||
self.assertRaises(
|
||||
exc.HTTPNotFound,
|
||||
self.controller.index,
|
||||
self.req, 'inexistent_share_id',
|
||||
)
|
||||
|
||||
def test_show_export_location_share_not_found(self):
|
||||
index_result = self.controller.index(self.req, self.share['id'])
|
||||
el_uuid = index_result['export_locations'][0]['uuid']
|
||||
self.assertRaises(
|
||||
exc.HTTPNotFound,
|
||||
self.controller.show,
|
||||
self.req, 'inexistent_share_id', el_uuid,
|
||||
)
|
||||
|
||||
def test_show_export_location_not_found(self):
|
||||
self.assertRaises(
|
||||
exc.HTTPNotFound,
|
||||
self.controller.show,
|
||||
self.req, self.share['id'], 'inexistent_export_location',
|
||||
)
|
||||
|
||||
def test_get_admin_export_location(self):
|
||||
el_data = {
|
||||
'path': '/admin/export/location',
|
||||
'is_admin_only': True,
|
||||
'metadata': {'foo': 'bar'},
|
||||
}
|
||||
db.share_export_locations_update(
|
||||
self.ctxt['admin'], self.share_instance_id, el_data, True)
|
||||
index_result = self.controller.index(self.req, self.share['id'])
|
||||
el_uuid = index_result['export_locations'][0]['uuid']
|
||||
|
||||
# Not found for member
|
||||
member_req = self._get_request(use_admin_context=False)
|
||||
self.assertRaises(
|
||||
exc.HTTPForbidden,
|
||||
self.controller.show,
|
||||
member_req, self.share['id'], el_uuid,
|
||||
)
|
||||
|
||||
# Ok for admin
|
||||
el = self.controller.show(self.req, self.share['id'], el_uuid)
|
||||
for k, v in el.items():
|
||||
self.assertEqual(v, el[k])
|
||||
|
||||
@ddt.data('1.0', '2.0', '2.8')
|
||||
def test_list_with_unsupported_version(self, version):
|
||||
self.assertRaises(
|
||||
exception.VersionNotFoundForAPIMethod,
|
||||
self.controller.index,
|
||||
self._get_request(version),
|
||||
self.share_instance_id,
|
||||
)
|
||||
|
||||
@ddt.data('1.0', '2.0', '2.8')
|
||||
def test_show_with_unsupported_version(self, version):
|
||||
index_result = self.controller.index(self.req, self.share['id'])
|
||||
|
||||
self.assertRaises(
|
||||
exception.VersionNotFoundForAPIMethod,
|
||||
self.controller.show,
|
||||
self._get_request(version),
|
||||
self.share['id'],
|
||||
index_result['export_locations'][0]['uuid']
|
||||
)
|
121 manila/tests/api/v2/test_share_instance_export_locations.py Normal file
@@ -0,0 +1,121 @@
|
||||
# Copyright (c) 2015 Mirantis Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import ddt
|
||||
import mock
|
||||
from webob import exc
|
||||
|
||||
from manila.api.v2 import share_instance_export_locations as export_locations
|
||||
from manila import context
|
||||
from manila import db
|
||||
from manila import exception
|
||||
from manila import policy
|
||||
from manila import test
|
||||
from manila.tests.api import fakes
|
||||
from manila.tests import db_utils
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class ShareInstanceExportLocationsAPITest(test.TestCase):
|
||||
|
||||
def _get_request(self, version="2.9", use_admin_context=True):
|
||||
req = fakes.HTTPRequest.blank(
|
||||
'/v2/share_instances/%s/export_locations' % self.share_instance_id,
|
||||
version=version, use_admin_context=use_admin_context)
|
||||
return req
|
||||
|
||||
def setUp(self):
|
||||
super(self.__class__, self).setUp()
|
||||
self.controller = (
|
||||
export_locations.ShareInstanceExportLocationController())
|
||||
self.resource_name = self.controller.resource_name
|
||||
self.ctxt = {
|
||||
'admin': context.RequestContext('admin', 'fake', True),
|
||||
'user': context.RequestContext('fake', 'fake'),
|
||||
}
|
||||
self.mock_policy_check = self.mock_object(
|
||||
policy, 'check_policy', mock.Mock(return_value=True))
|
||||
self.share = db_utils.create_share()
|
||||
self.share_instance_id = self.share.instance.id
|
||||
self.req = self._get_request()
|
||||
paths = ['fake1/1/', 'fake2/2', 'fake3/3']
|
||||
db.share_export_locations_update(
|
||||
self.ctxt['admin'], self.share_instance_id, paths, False)
|
||||
|
||||
@ddt.data('admin', 'user')
|
||||
def test_list_and_show(self, role):
|
||||
req = self._get_request(use_admin_context=(role == 'admin'))
|
||||
index_result = self.controller.index(req, self.share_instance_id)
|
||||
|
||||
self.assertIn('export_locations', index_result)
|
||||
self.assertEqual(1, len(index_result))
|
||||
self.assertEqual(3, len(index_result['export_locations']))
|
||||
|
||||
for index_el in index_result['export_locations']:
|
||||
self.assertIn('uuid', index_el)
|
||||
show_result = self.controller.show(
|
||||
req, self.share_instance_id, index_el['uuid'])
|
||||
self.assertIn('export_location', show_result)
|
||||
self.assertEqual(1, len(show_result))
|
||||
expected_keys = (
|
||||
'created_at', 'updated_at', 'uuid', 'path',
|
||||
'share_instance_id', 'is_admin_only',
|
||||
)
|
||||
for el in (index_el, show_result['export_location']):
|
||||
self.assertEqual(len(expected_keys), len(el))
|
||||
for key in expected_keys:
|
||||
self.assertIn(key, el)
|
||||
|
||||
for key in expected_keys:
|
||||
self.assertEqual(
|
||||
index_el[key], show_result['export_location'][key])
|
||||
|
||||
def test_list_export_locations_share_instance_not_found(self):
|
||||
self.assertRaises(
|
||||
exc.HTTPNotFound,
|
||||
self.controller.index,
|
||||
self.req, 'inexistent_share_instance_id',
|
||||
)
|
||||
|
||||
def test_show_export_location_share_instance_not_found(self):
|
||||
index_result = self.controller.index(self.req, self.share_instance_id)
|
||||
el_uuid = index_result['export_locations'][0]['uuid']
|
||||
|
||||
self.assertRaises(
|
||||
exc.HTTPNotFound,
|
||||
self.controller.show,
|
||||
self.req, 'inexistent_share_id', el_uuid,
|
||||
)
|
||||
|
||||
@ddt.data('1.0', '2.0', '2.8')
|
||||
def test_list_with_unsupported_version(self, version):
|
||||
self.assertRaises(
|
||||
exception.VersionNotFoundForAPIMethod,
|
||||
self.controller.index,
|
||||
self._get_request(version),
|
||||
self.share_instance_id,
|
||||
)
|
||||
|
||||
@ddt.data('1.0', '2.0', '2.8')
|
||||
def test_show_with_unsupported_version(self, version):
|
||||
index_result = self.controller.index(self.req, self.share_instance_id)
|
||||
|
||||
self.assertRaises(
|
||||
exception.VersionNotFoundForAPIMethod,
|
||||
self.controller.show,
|
||||
self._get_request(version),
|
||||
self.share_instance_id,
|
||||
index_result['export_locations'][0]['uuid']
|
||||
)
|
@@ -17,6 +17,7 @@ from oslo_serialization import jsonutils
|
||||
import six
|
||||
from webob import exc as webob_exc
|
||||
|
||||
from manila.api.openstack import api_version_request
|
||||
from manila.api.v2 import share_instances
|
||||
from manila.common import constants
|
||||
from manila import context
|
||||
@@ -56,10 +57,10 @@ class ShareInstancesAPITest(test.TestCase):
|
||||
version=version)
|
||||
return instance, req
|
||||
|
||||
def _get_request(self, uri, context=None):
|
||||
def _get_request(self, uri, context=None, version="2.3"):
|
||||
if context is None:
|
||||
context = self.admin_context
|
||||
req = fakes.HTTPRequest.blank('/shares', version="2.3")
|
||||
req = fakes.HTTPRequest.blank('/shares', version=version)
|
||||
req.environ['manila.context'] = context
|
||||
return req
|
||||
|
||||
@@ -94,10 +95,37 @@ class ShareInstancesAPITest(test.TestCase):
|
||||
self.mock_policy_check.assert_called_once_with(
|
||||
self.admin_context, self.resource_name, 'show')
|
||||
|
||||
def test_get_share_instances(self):
|
||||
def test_show_with_export_locations(self):
|
||||
test_instance = db_utils.create_share(size=1).instance
|
||||
req = self._get_request('fake', version="2.8")
|
||||
id = test_instance['id']
|
||||
|
||||
actual_result = self.controller.show(req, id)
|
||||
|
||||
self.assertEqual(id, actual_result['share_instance']['id'])
|
||||
self.assertIn("export_location", actual_result['share_instance'])
|
||||
self.assertIn("export_locations", actual_result['share_instance'])
|
||||
self.mock_policy_check.assert_called_once_with(
|
||||
self.admin_context, self.resource_name, 'show')
|
||||
|
||||
def test_show_without_export_locations(self):
|
||||
test_instance = db_utils.create_share(size=1).instance
|
||||
req = self._get_request('fake', version="2.9")
|
||||
id = test_instance['id']
|
||||
|
||||
actual_result = self.controller.show(req, id)
|
||||
|
||||
self.assertEqual(id, actual_result['share_instance']['id'])
|
||||
self.assertNotIn("export_location", actual_result['share_instance'])
|
||||
self.assertNotIn("export_locations", actual_result['share_instance'])
|
||||
self.mock_policy_check.assert_called_once_with(
|
||||
self.admin_context, self.resource_name, 'show')
|
||||
|
||||
@ddt.data("2.3", "2.8", "2.9")
|
||||
def test_get_share_instances(self, version):
|
||||
test_share = db_utils.create_share(size=1)
|
||||
id = test_share['id']
|
||||
req = self._get_request('fake')
|
||||
req = self._get_request('fake', version=version)
|
||||
req_context = req.environ['manila.context']
|
||||
share_policy_check_call = mock.call(
|
||||
req_context, 'share', 'get', mock.ANY)
|
||||
@ -110,6 +138,15 @@ class ShareInstancesAPITest(test.TestCase):
|
||||
[test_share.instance],
|
||||
actual_result['share_instances']
|
||||
)
|
||||
self.assertEqual(1, len(actual_result.get("share_instances", 0)))
|
||||
for instance in actual_result["share_instances"]:
|
||||
if (api_version_request.APIVersionRequest(version) >
|
||||
api_version_request.APIVersionRequest("2.8")):
|
||||
assert_method = self.assertNotIn
|
||||
else:
|
||||
assert_method = self.assertIn
|
||||
assert_method("export_location", instance)
|
||||
assert_method("export_locations", instance)
|
||||
self.mock_policy_check.assert_has_calls([
|
||||
get_instances_policy_check_call, share_policy_check_call])
|
||||
|
||||
|
@ -814,6 +814,19 @@ class ShareAPITest(test.TestCase):
|
||||
expected['shares'][0]['task_state'] = None
|
||||
self._list_detail_test_common(req, expected)
|
||||
|
||||
def test_share_list_detail_without_export_locations(self):
|
||||
env = {'QUERY_STRING': 'name=Share+Test+Name'}
|
||||
req = fakes.HTTPRequest.blank('/shares/detail', environ=env,
|
||||
version="2.9")
|
||||
expected = self._list_detail_common_expected()
|
||||
expected['shares'][0]['consistency_group_id'] = None
|
||||
expected['shares'][0]['source_cgsnapshot_member_id'] = None
|
||||
expected['shares'][0]['task_state'] = None
|
||||
expected['shares'][0]['share_type_name'] = None
|
||||
expected['shares'][0].pop('export_location')
|
||||
expected['shares'][0].pop('export_locations')
|
||||
self._list_detail_test_common(req, expected)
|
||||
|
||||
def test_remove_invalid_options(self):
|
||||
ctx = context.RequestContext('fakeuser', 'fakeproject', is_admin=False)
|
||||
search_opts = {'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'}
|
||||
|
@ -37,6 +37,7 @@ import abc
|
||||
|
||||
from oslo_utils import uuidutils
|
||||
import six
|
||||
from sqlalchemy import exc as sa_exc
|
||||
|
||||
from manila.db.migrations import utils
|
||||
|
||||
@ -172,3 +173,83 @@ class AvailabilityZoneMigrationChecks(BaseMigrationChecks):
|
||||
self.test_case.assertIn(
|
||||
service.availability_zone, self.valid_az_names
|
||||
)
|
||||
|
||||
|
||||
@map_to_migration('dda6de06349')
|
||||
class ShareInstanceExportLocationMetadataChecks(BaseMigrationChecks):
|
||||
el_table_name = 'share_instance_export_locations'
|
||||
elm_table_name = 'share_instance_export_locations_metadata'
|
||||
|
||||
def setup_upgrade_data(self, engine):
|
||||
# Setup shares
|
||||
share_fixture = [{'id': 'foo_share_id'}, {'id': 'bar_share_id'}]
|
||||
share_table = utils.load_table('shares', engine)
|
||||
for fixture in share_fixture:
|
||||
engine.execute(share_table.insert(fixture))
|
||||
|
||||
# Setup share instances
|
||||
si_fixture = [
|
||||
{'id': 'foo_share_instance_id_oof',
|
||||
'share_id': share_fixture[0]['id']},
|
||||
{'id': 'bar_share_instance_id_rab',
|
||||
'share_id': share_fixture[1]['id']},
|
||||
]
|
||||
si_table = utils.load_table('share_instances', engine)
|
||||
for fixture in si_fixture:
|
||||
engine.execute(si_table.insert(fixture))
|
||||
|
||||
# Setup export locations
|
||||
el_fixture = [
|
||||
{'id': 1, 'path': '/1', 'share_instance_id': si_fixture[0]['id']},
|
||||
{'id': 2, 'path': '/2', 'share_instance_id': si_fixture[1]['id']},
|
||||
]
|
||||
el_table = utils.load_table(self.el_table_name, engine)
|
||||
for fixture in el_fixture:
|
||||
engine.execute(el_table.insert(fixture))
|
||||
|
||||
def check_upgrade(self, engine, data):
|
||||
el_table = utils.load_table(
|
||||
'share_instance_export_locations', engine)
|
||||
for el in engine.execute(el_table.select()):
|
||||
self.test_case.assertTrue(hasattr(el, 'is_admin_only'))
|
||||
self.test_case.assertTrue(hasattr(el, 'uuid'))
|
||||
self.test_case.assertEqual(False, el.is_admin_only)
|
||||
self.test_case.assertTrue(uuidutils.is_uuid_like(el.uuid))
|
||||
|
||||
# Write export location metadata
|
||||
el_metadata = [
|
||||
{'key': 'foo_key', 'value': 'foo_value', 'export_location_id': 1},
|
||||
{'key': 'bar_key', 'value': 'bar_value', 'export_location_id': 2},
|
||||
]
|
||||
elm_table = utils.load_table(self.elm_table_name, engine)
|
||||
engine.execute(elm_table.insert(el_metadata))
|
||||
|
||||
# Verify values of written metadata
|
||||
for el_meta_datum in el_metadata:
|
||||
el_id = el_meta_datum['export_location_id']
|
||||
records = engine.execute(elm_table.select().where(
|
||||
elm_table.c.export_location_id == el_id))
|
||||
self.test_case.assertEqual(1, records.rowcount)
|
||||
record = records.first()
|
||||
|
||||
expected_keys = (
|
||||
'id', 'created_at', 'updated_at', 'deleted_at', 'deleted',
|
||||
'export_location_id', 'key', 'value',
|
||||
)
|
||||
self.test_case.assertEqual(len(expected_keys), len(record.keys()))
|
||||
for key in expected_keys:
|
||||
self.test_case.assertIn(key, record.keys())
|
||||
|
||||
for k, v in el_meta_datum.items():
|
||||
self.test_case.assertTrue(hasattr(record, k))
|
||||
self.test_case.assertEqual(v, getattr(record, k))
|
||||
|
||||
def check_downgrade(self, engine):
|
||||
el_table = utils.load_table(
|
||||
'share_instance_export_locations', engine)
|
||||
for el in engine.execute(el_table.select()):
|
||||
self.test_case.assertFalse(hasattr(el, 'is_admin_only'))
|
||||
self.test_case.assertFalse(hasattr(el, 'uuid'))
|
||||
self.test_case.assertRaises(
|
||||
sa_exc.NoSuchTableError,
|
||||
utils.load_table, self.elm_table_name, engine)
|
||||
|
@ -573,8 +573,7 @@ class ShareSnapshotDatabaseAPITestCase(test.TestCase):
|
||||
class ShareExportLocationsDatabaseAPITestCase(test.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
"""Run before each test."""
|
||||
super(ShareExportLocationsDatabaseAPITestCase, self).setUp()
|
||||
super(self.__class__, self).setUp()
|
||||
self.ctxt = context.get_admin_context()
|
||||
|
||||
def test_update_valid_order(self):
|
||||
@ -605,6 +604,187 @@ class ShareExportLocationsDatabaseAPITestCase(test.TestCase):
|
||||
|
||||
self.assertTrue(actual_result == [initial_location])
|
||||
|
||||
def test_get_admin_export_locations(self):
|
||||
ctxt_user = context.RequestContext(
|
||||
user_id='fake user', project_id='fake project', is_admin=False)
|
||||
share = db_utils.create_share()
|
||||
locations = [
|
||||
{'path': 'fake1/1/', 'is_admin_only': True},
|
||||
{'path': 'fake2/2/', 'is_admin_only': True},
|
||||
{'path': 'fake3/3/', 'is_admin_only': True},
|
||||
]
|
||||
|
||||
db_api.share_export_locations_update(
|
||||
self.ctxt, share.instance['id'], locations, delete=False)
|
||||
|
||||
user_result = db_api.share_export_locations_get(ctxt_user, share['id'])
|
||||
self.assertEqual([], user_result)
|
||||
|
||||
admin_result = db_api.share_export_locations_get(
|
||||
self.ctxt, share['id'])
|
||||
self.assertEqual(3, len(admin_result))
|
||||
for location in locations:
|
||||
self.assertIn(location['path'], admin_result)
|
||||
|
||||
def test_get_user_export_locations(self):
|
||||
ctxt_user = context.RequestContext(
|
||||
user_id='fake user', project_id='fake project', is_admin=False)
|
||||
share = db_utils.create_share()
|
||||
locations = [
|
||||
{'path': 'fake1/1/', 'is_admin_only': False},
|
||||
{'path': 'fake2/2/', 'is_admin_only': False},
|
||||
{'path': 'fake3/3/', 'is_admin_only': False},
|
||||
]
|
||||
|
||||
db_api.share_export_locations_update(
|
||||
self.ctxt, share.instance['id'], locations, delete=False)
|
||||
|
||||
user_result = db_api.share_export_locations_get(ctxt_user, share['id'])
|
||||
self.assertEqual(3, len(user_result))
|
||||
for location in locations:
|
||||
self.assertIn(location['path'], user_result)
|
||||
|
||||
admin_result = db_api.share_export_locations_get(
|
||||
self.ctxt, share['id'])
|
||||
self.assertEqual(3, len(admin_result))
|
||||
for location in locations:
|
||||
self.assertIn(location['path'], admin_result)
|
||||
|
||||
def test_get_user_export_locations_old_view(self):
|
||||
ctxt_user = context.RequestContext(
|
||||
user_id='fake user', project_id='fake project', is_admin=False)
|
||||
share = db_utils.create_share()
|
||||
locations = ['fake1/1/', 'fake2/2', 'fake3/3']
|
||||
|
||||
db_api.share_export_locations_update(
|
||||
self.ctxt, share.instance['id'], locations, delete=False)
|
||||
|
||||
user_result = db_api.share_export_locations_get(ctxt_user, share['id'])
|
||||
self.assertEqual(locations, user_result)
|
||||
|
||||
admin_result = db_api.share_export_locations_get(
|
||||
self.ctxt, share['id'])
|
||||
self.assertEqual(locations, admin_result)
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class ShareInstanceExportLocationsMetadataDatabaseAPITestCase(test.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(self.__class__, self).setUp()
|
||||
self.ctxt = context.get_admin_context()
|
||||
self.share = db_utils.create_share()
|
||||
self.initial_locations = ['/fake/foo/', '/fake/bar', '/fake/quuz']
|
||||
db_api.share_export_locations_update(
|
||||
self.ctxt, self.share.instance['id'], self.initial_locations,
|
||||
delete=False)
|
||||
|
||||
def _get_export_location_uuid_by_path(self, path):
|
||||
els = db_api.share_export_locations_get_by_share_id(
|
||||
self.ctxt, self.share.id)
|
||||
export_location_uuid = None
|
||||
for el in els:
|
||||
if el.path == path:
|
||||
export_location_uuid = el.uuid
|
||||
self.assertFalse(export_location_uuid is None)
|
||||
return export_location_uuid
|
||||
|
||||
def test_get_export_locations_by_share_id(self):
|
||||
els = db_api.share_export_locations_get_by_share_id(
|
||||
self.ctxt, self.share.id)
|
||||
self.assertEqual(3, len(els))
|
||||
for path in self.initial_locations:
|
||||
self.assertTrue(any([path in el.path for el in els]))
|
||||
|
||||
def test_get_export_locations_by_share_instance_id(self):
|
||||
els = db_api.share_export_locations_get_by_share_instance_id(
|
||||
self.ctxt, self.share.instance.id)
|
||||
self.assertEqual(3, len(els))
|
||||
for path in self.initial_locations:
|
||||
self.assertTrue(any([path in el.path for el in els]))
|
||||
|
||||
def test_export_location_metadata_update_delete(self):
|
||||
export_location_uuid = self._get_export_location_uuid_by_path(
|
||||
self.initial_locations[0])
|
||||
metadata = {
|
||||
'foo_key': 'foo_value',
|
||||
'bar_key': 'bar_value',
|
||||
'quuz_key': 'quuz_value',
|
||||
}
|
||||
|
||||
db_api.export_location_metadata_update(
|
||||
self.ctxt, export_location_uuid, metadata, False)
|
||||
|
||||
db_api.export_location_metadata_delete(
|
||||
self.ctxt, export_location_uuid, list(metadata.keys())[0:-1])
|
||||
|
||||
result = db_api.export_location_metadata_get(
|
||||
self.ctxt, export_location_uuid)
|
||||
|
||||
key = list(metadata.keys())[-1]
|
||||
self.assertEqual({key: metadata[key]}, result)
|
||||
|
||||
db_api.export_location_metadata_delete(
|
||||
self.ctxt, export_location_uuid)
|
||||
|
||||
result = db_api.export_location_metadata_get(
|
||||
self.ctxt, export_location_uuid)
|
||||
self.assertEqual({}, result)
|
||||
|
||||
def test_export_location_metadata_update_get(self):
|
||||
|
||||
# Write metadata for target export location
|
||||
export_location_uuid = self._get_export_location_uuid_by_path(
|
||||
self.initial_locations[0])
|
||||
metadata = {'foo_key': 'foo_value', 'bar_key': 'bar_value'}
|
||||
db_api.export_location_metadata_update(
|
||||
self.ctxt, export_location_uuid, metadata, False)
|
||||
|
||||
# Write metadata for some concurrent export location
|
||||
other_export_location_uuid = self._get_export_location_uuid_by_path(
|
||||
self.initial_locations[1])
|
||||
other_metadata = {'key_from_other_el': 'value_of_key_from_other_el'}
|
||||
db_api.export_location_metadata_update(
|
||||
self.ctxt, other_export_location_uuid, other_metadata, False)
|
||||
|
||||
result = db_api.export_location_metadata_get(
|
||||
self.ctxt, export_location_uuid)
|
||||
|
||||
self.assertEqual(metadata, result)
|
||||
|
||||
updated_metadata = {
|
||||
'foo_key': metadata['foo_key'],
|
||||
'quuz_key': 'quuz_value',
|
||||
}
|
||||
|
||||
db_api.export_location_metadata_update(
|
||||
self.ctxt, export_location_uuid, updated_metadata, True)
|
||||
|
||||
result = db_api.export_location_metadata_get(
|
||||
self.ctxt, export_location_uuid)
|
||||
|
||||
self.assertEqual(updated_metadata, result)
|
||||
|
||||
@ddt.data(
|
||||
("k", "v"),
|
||||
("k" * 256, "v"),
|
||||
("k", "v" * 1024),
|
||||
("k" * 256, "v" * 1024),
|
||||
)
|
||||
@ddt.unpack
|
||||
def test_set_metadata_with_different_length(self, key, value):
|
||||
export_location_uuid = self._get_export_location_uuid_by_path(
|
||||
self.initial_locations[1])
|
||||
metadata = {key: value}
|
||||
|
||||
db_api.export_location_metadata_update(
|
||||
self.ctxt, export_location_uuid, metadata, False)
|
||||
|
||||
result = db_api.export_location_metadata_get(
|
||||
self.ctxt, export_location_uuid)
|
||||
|
||||
self.assertEqual(metadata, result)
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class DriverPrivateDataDatabaseAPITestCase(test.TestCase):
|
||||
|
@ -33,6 +33,8 @@
|
||||
"share:unmanage": "rule:admin_api",
|
||||
"share:force_delete": "rule:admin_api",
|
||||
"share:reset_status": "rule:admin_api",
|
||||
"share_export_location:index": "rule:default",
|
||||
"share_export_location:show": "rule:default",
|
||||
|
||||
"share_type:index": "rule:default",
|
||||
"share_type:show": "rule:default",
|
||||
@ -53,6 +55,8 @@
|
||||
"share_instance:show": "rule:admin_api",
|
||||
"share_instance:force_delete": "rule:admin_api",
|
||||
"share_instance:reset_status": "rule:admin_api",
|
||||
"share_instance_export_location:index": "rule:admin_api",
|
||||
"share_instance_export_location:show": "rule:admin_api",
|
||||
|
||||
"share_snapshot:force_delete": "rule:admin_api",
|
||||
"share_snapshot:reset_status": "rule:admin_api",
|
||||
|
@ -339,11 +339,16 @@ class GenericShareDriverTestCase(test.TestCase):
|
||||
mock.Mock(return_value=volume2))
|
||||
self.mock_object(self._driver, '_format_device')
|
||||
self.mock_object(self._driver, '_mount_device')
|
||||
expected_el = {
|
||||
'is_admin_only': False,
|
||||
'path': 'fakelocation',
|
||||
'metadata': {'export_location_metadata_example': 'example'},
|
||||
}
|
||||
|
||||
result = self._driver.create_share(
|
||||
self._context, self.share, share_server=self.server)
|
||||
|
||||
self.assertEqual('fakelocation', result)
|
||||
self.assertEqual(expected_el, result)
|
||||
self._driver._allocate_container.assert_called_once_with(
|
||||
self._driver.admin_context, self.share)
|
||||
self._driver._attach_volume.assert_called_once_with(
|
||||
|
@ -1629,10 +1629,32 @@ class ShareAPITestCase(test.TestCase):
|
||||
share = db_utils.create_share(
|
||||
status=constants.STATUS_AVAILABLE,
|
||||
host='fake@backend#pool', share_type_id='fake_type_id')
|
||||
request_spec = {'share_properties': share,
|
||||
'share_instance_properties': share.instance.to_dict(),
|
||||
'share_type': 'fake_type',
|
||||
'share_id': share['id']}
|
||||
request_spec = {
|
||||
'share_properties': {
|
||||
'size': share['size'],
|
||||
'user_id': share['user_id'],
|
||||
'project_id': share['project_id'],
|
||||
'share_server_id': share['share_server_id'],
|
||||
'snapshot_support': share['snapshot_support'],
|
||||
'share_proto': share['share_proto'],
|
||||
'share_type_id': share['share_type_id'],
|
||||
'is_public': share['is_public'],
|
||||
'consistency_group_id': share['consistency_group_id'],
|
||||
'source_cgsnapshot_member_id': share[
|
||||
'source_cgsnapshot_member_id'],
|
||||
'snapshot_id': share['snapshot_id'],
|
||||
},
|
||||
'share_instance_properties': {
|
||||
'availability_zone_id': share.instance['availability_zone_id'],
|
||||
'share_network_id': share.instance['share_network_id'],
|
||||
'share_server_id': share.instance['share_server_id'],
|
||||
'share_id': share.instance['share_id'],
|
||||
'host': share.instance['host'],
|
||||
'status': share.instance['status'],
|
||||
},
|
||||
'share_type': 'fake_type',
|
||||
'share_id': share['id'],
|
||||
}
|
||||
|
||||
self.mock_object(self.scheduler_rpcapi, 'migrate_share_to_host')
|
||||
self.mock_object(share_types, 'get_share_type',
|
||||
|
@ -2473,7 +2473,13 @@ class ShareManagerTestCase(test.TestCase):
|
||||
status_success = {
|
||||
'task_state': constants.STATUS_TASK_STATE_MIGRATION_SUCCESS
|
||||
}
|
||||
share_server = 'fake-share-server'
|
||||
share_server = {
|
||||
'id': 'fake_share_server_id',
|
||||
'share_network_id': 'fake_share_network_id',
|
||||
'host': 'fake_host',
|
||||
'status': 'fake_status',
|
||||
'backend_details': {'foo': 'bar'},
|
||||
}
|
||||
migration_info = 'fake-info'
|
||||
|
||||
manager = self.share_manager
|
||||
@ -2519,7 +2525,13 @@ class ShareManagerTestCase(test.TestCase):
|
||||
status_success = {
|
||||
'task_state': constants.STATUS_TASK_STATE_MIGRATION_SUCCESS
|
||||
}
|
||||
share_server = 'fake-share-server'
|
||||
share_server = {
|
||||
'id': 'fake_share_server_id',
|
||||
'share_network_id': 'fake_share_network_id',
|
||||
'host': 'fake_host',
|
||||
'status': 'fake_status',
|
||||
'backend_details': {'foo': 'bar'},
|
||||
}
|
||||
migration_info = 'fake-info'
|
||||
|
||||
manager = self.share_manager
|
||||
@ -2560,7 +2572,13 @@ class ShareManagerTestCase(test.TestCase):
|
||||
status_success = {
|
||||
'task_state': constants.STATUS_TASK_STATE_MIGRATION_SUCCESS
|
||||
}
|
||||
share_server = 'fake-share-server'
|
||||
share_server = {
|
||||
'id': 'fake_share_server_id',
|
||||
'share_network_id': 'fake_share_network_id',
|
||||
'host': 'fake_host',
|
||||
'status': 'fake_status',
|
||||
'backend_details': {'foo': 'bar'},
|
||||
}
|
||||
migration_info = 'fake-info'
|
||||
|
||||
manager = self.share_manager
|
||||
@ -2605,7 +2623,13 @@ class ShareManagerTestCase(test.TestCase):
|
||||
status_error = {
|
||||
'task_state': constants.STATUS_TASK_STATE_MIGRATION_ERROR
|
||||
}
|
||||
share_server = 'fake-share-server'
|
||||
share_server = {
|
||||
'id': 'fake_share_server_id',
|
||||
'share_network_id': 'fake_share_network_id',
|
||||
'host': 'fake_host',
|
||||
'status': 'fake_status',
|
||||
'backend_details': {'foo': 'bar'},
|
||||
}
|
||||
migration_info = 'fake-info'
|
||||
|
||||
manager = self.share_manager
|
||||
@ -2724,8 +2748,20 @@ class ShareManagerTestCase(test.TestCase):
|
||||
}
|
||||
status_inactive = {'status': constants.STATUS_INACTIVE}
|
||||
status_available = {'status': constants.STATUS_AVAILABLE}
|
||||
share_server = 'fake-server'
|
||||
new_share_server = 'new-fake-server'
|
||||
share_server = {
|
||||
'id': 'fake_share_server_id',
|
||||
'share_network_id': 'fake_share_network_id',
|
||||
'host': 'fake_host',
|
||||
'status': 'fake_status',
|
||||
'backend_details': {'foo': 'bar'},
|
||||
}
|
||||
new_share_server = {
|
||||
'id': 'fake_share_server_id2',
|
||||
'share_network_id': 'fake_share_network_id2',
|
||||
'host': 'fake_host2',
|
||||
'status': 'fake_status2',
|
||||
'backend_details': {'foo2': 'bar2'},
|
||||
}
|
||||
src_migration_info = 'fake-src-migration-info'
|
||||
dest_migration_info = 'fake-dest-migration-info'
|
||||
|
||||
|
@ -462,6 +462,13 @@ class ManilaExceptionResponseCode404(test.TestCase):
|
||||
self.assertEqual(404, e.code)
|
||||
self.assertIn(name, e.msg)
|
||||
|
||||
def test_export_location_not_found(self):
|
||||
# verify response code for exception.ExportLocationNotFound
|
||||
uuid = "fake-export-location-uuid"
|
||||
e = exception.ExportLocationNotFound(uuid=uuid)
|
||||
self.assertEqual(404, e.code)
|
||||
self.assertIn(uuid, e.msg)
|
||||
|
||||
def test_share_resource_not_found(self):
|
||||
# verify response code for exception.ShareResourceNotFound
|
||||
share_id = "fake_share_id"
|
||||
|
@ -36,7 +36,7 @@ ShareGroup = [
|
||||
help="The minimum api microversion is configured to be the "
|
||||
"value of the minimum microversion supported by Manila."),
|
||||
cfg.StrOpt("max_api_microversion",
|
||||
default="2.8",
|
||||
default="2.9",
|
||||
help="The maximum api microversion is configured to be the "
|
||||
"value of the latest microversion supported by Manila."),
|
||||
cfg.StrOpt("region",
|
||||
|
@ -238,6 +238,23 @@ class SharesV2Client(shares_client.SharesClient):
|
||||
self.expected_success(200, resp.status)
|
||||
return self._parse_resp(body)
|
||||
|
||||
def get_share_export_location(
|
||||
self, share_id, export_location_uuid, version=LATEST_MICROVERSION):
|
||||
resp, body = self.get(
|
||||
"shares/%(share_id)s/export_locations/%(el_uuid)s" % {
|
||||
"share_id": share_id, "el_uuid": export_location_uuid},
|
||||
version=version)
|
||||
self.expected_success(200, resp.status)
|
||||
return self._parse_resp(body)
|
||||
|
||||
def list_share_export_locations(
|
||||
self, share_id, version=LATEST_MICROVERSION):
|
||||
resp, body = self.get(
|
||||
"shares/%(share_id)s/export_locations" % {"share_id": share_id},
|
||||
version=version)
|
||||
self.expected_success(200, resp.status)
|
||||
return self._parse_resp(body)
|
||||
|
||||
def delete_share(self, share_id, params=None,
|
||||
version=LATEST_MICROVERSION):
|
||||
uri = "shares/%s" % share_id
|
||||
@ -265,6 +282,24 @@ class SharesV2Client(shares_client.SharesClient):
|
||||
self.expected_success(200, resp.status)
|
||||
return self._parse_resp(body)
|
||||
|
||||
def get_share_instance_export_location(
|
||||
self, instance_id, export_location_uuid,
|
||||
version=LATEST_MICROVERSION):
|
||||
resp, body = self.get(
|
||||
"share_instances/%(instance_id)s/export_locations/%(el_uuid)s" % {
|
||||
"instance_id": instance_id, "el_uuid": export_location_uuid},
|
||||
version=version)
|
||||
self.expected_success(200, resp.status)
|
||||
return self._parse_resp(body)
|
||||
|
||||
def list_share_instance_export_locations(
|
||||
self, instance_id, version=LATEST_MICROVERSION):
|
||||
resp, body = self.get(
|
||||
"share_instances/%s/export_locations" % instance_id,
|
||||
version=version)
|
||||
self.expected_success(200, resp.status)
|
||||
return self._parse_resp(body)
|
||||
|
||||
def wait_for_share_instance_status(self, instance_id, status,
|
||||
version=LATEST_MICROVERSION):
|
||||
"""Waits for a share to reach a given status."""
|
||||
|
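For orientation, here is a minimal usage sketch of the client helpers added above; the "client" object, the IDs, and the microversion value are illustrative placeholders, not part of this change:

# Assumed: "client" is an already-authenticated SharesV2Client and the IDs exist.
share_id = "11111111-2222-3333-4444-555555555555"

# List a share's export locations (available starting with microversion 2.9).
export_locations = client.list_share_export_locations(share_id, version="2.9")

# Fetch a single export location by its UUID.
el = client.get_share_export_location(
    share_id, export_locations[0]["uuid"], version="2.9")

# The same pair of calls exists for share instances; the policy rules added
# in this patch restrict those to admin contexts by default.
instance_els = client.list_share_instance_export_locations(
    "fake-share-instance-id", version="2.9")
instance_el = client.get_share_instance_export_location(
    "fake-share-instance-id", instance_els[0]["uuid"], version="2.9")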
143 manila_tempest_tests/tests/api/admin/test_export_locations.py Normal file
@ -0,0 +1,143 @@
|
||||
# Copyright 2015 Mirantis Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_utils import timeutils
|
||||
from oslo_utils import uuidutils
|
||||
import six
|
||||
from tempest import config
|
||||
from tempest import test
|
||||
|
||||
from manila_tempest_tests import clients_share as clients
|
||||
from manila_tempest_tests.tests.api import base
|
||||
|
||||
CONF = config.CONF
|
||||
|
||||
|
||||
@base.skip_if_microversion_not_supported("2.9")
|
||||
class ExportLocationsTest(base.BaseSharesAdminTest):
|
||||
|
||||
@classmethod
|
||||
def resource_setup(cls):
|
||||
super(ExportLocationsTest, cls).resource_setup()
|
||||
cls.admin_client = cls.shares_v2_client
|
||||
cls.member_client = clients.Manager().shares_v2_client
|
||||
cls.share = cls.create_share()
|
||||
cls.share = cls.shares_v2_client.get_share(cls.share['id'])
|
||||
cls.share_instances = cls.shares_v2_client.get_instances_of_share(
|
||||
cls.share['id'])
|
||||
|
||||
def _verify_export_location_structure(self, export_locations,
|
||||
role='admin'):
|
||||
expected_keys = [
|
||||
'created_at', 'updated_at', 'path', 'uuid',
|
||||
]
|
||||
if role == 'admin':
|
||||
expected_keys.extend(['is_admin_only', 'share_instance_id'])
|
||||
|
||||
if not isinstance(export_locations, (list, tuple, set)):
|
||||
export_locations = (export_locations, )
|
||||
|
||||
for export_location in export_locations:
|
||||
self.assertEqual(len(expected_keys), len(export_location))
|
||||
for key in expected_keys:
|
||||
self.assertIn(key, export_location)
|
||||
if role == 'admin':
|
||||
self.assertIn(export_location['is_admin_only'], (True, False))
|
||||
self.assertTrue(
|
||||
uuidutils.is_uuid_like(
|
||||
export_location['share_instance_id']))
|
||||
self.assertTrue(uuidutils.is_uuid_like(export_location['uuid']))
|
||||
self.assertTrue(
|
||||
isinstance(export_location['path'], six.string_types))
|
||||
for time in (export_location['created_at'],
|
||||
export_location['updated_at']):
|
||||
# If var 'time' has incorrect value then ValueError exception
|
||||
# is expected to be raised. So, just try parse it making
|
||||
# assertion that it has proper date value.
|
||||
timeutils.parse_strtime(time)
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
def test_list_share_export_locations(self):
|
||||
export_locations = self.admin_client.list_share_export_locations(
|
||||
self.share['id'])
|
||||
|
||||
self._verify_export_location_structure(export_locations)
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
def test_get_share_export_location(self):
|
||||
export_locations = self.admin_client.list_share_export_locations(
|
||||
self.share['id'])
|
||||
|
||||
for export_location in export_locations:
|
||||
el = self.admin_client.get_share_export_location(
|
||||
self.share['id'], export_location['uuid'])
|
||||
self._verify_export_location_structure(el)
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
def test_list_share_export_locations_by_member(self):
|
||||
export_locations = self.member_client.list_share_export_locations(
|
||||
self.share['id'])
|
||||
|
||||
self._verify_export_location_structure(export_locations, 'member')
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
def test_get_share_export_location_by_member(self):
|
||||
export_locations = self.admin_client.list_share_export_locations(
|
||||
self.share['id'])
|
||||
|
||||
for export_location in export_locations:
|
||||
el = self.member_client.get_share_export_location(
|
||||
self.share['id'], export_location['uuid'])
|
||||
self._verify_export_location_structure(el, 'member')
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
def test_list_share_instance_export_locations(self):
|
||||
for share_instance in self.share_instances:
|
||||
export_locations = (
|
||||
self.admin_client.list_share_instance_export_locations(
|
||||
share_instance['id']))
|
||||
self._verify_export_location_structure(export_locations)
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
def test_get_share_instance_export_location(self):
|
||||
for share_instance in self.share_instances:
|
||||
export_locations = (
|
||||
self.admin_client.list_share_instance_export_locations(
|
||||
share_instance['id']))
|
||||
for el in export_locations:
|
||||
el = self.admin_client.get_share_instance_export_location(
|
||||
share_instance['id'], el['uuid'])
|
||||
self._verify_export_location_structure(el)
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
def test_share_contains_all_export_locations_of_all_share_instances(self):
|
||||
share_export_locations = self.admin_client.list_share_export_locations(
|
||||
self.share['id'])
|
||||
share_instances_export_locations = []
|
||||
for share_instance in self.share_instances:
|
||||
share_instance_export_locations = (
|
||||
self.admin_client.list_share_instance_export_locations(
|
||||
share_instance['id']))
|
||||
share_instances_export_locations.extend(
|
||||
share_instance_export_locations)
|
||||
|
||||
self.assertEqual(
|
||||
len(share_export_locations),
|
||||
len(share_instances_export_locations)
|
||||
)
|
||||
self.assertEqual(
|
||||
sorted(share_export_locations, key=lambda el: el['uuid']),
|
||||
sorted(share_instances_export_locations, key=lambda el: el['uuid'])
|
||||
)
|
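As a reading aid for the _verify_export_location_structure checks above, the following is an illustrative export location body; every value is a made-up placeholder rather than real API output:

# Admin view: the common keys plus 'is_admin_only' and 'share_instance_id'.
admin_export_location = {
    "uuid": "b6bd76ce-12a2-40a3-b0d9-8314fa02ea58",
    "path": "10.254.0.3:/shares/share-fake-uuid",
    "is_admin_only": False,
    "share_instance_id": "e1c2d35e-fe67-4028-ad7a-45f668732b1d",
    "created_at": "2015-09-07T08:51:34.000000",
    "updated_at": "2015-09-07T08:51:34.000000",
}

# Member view: only the common keys survive.
member_export_location = {
    key: value for key, value in admin_export_location.items()
    if key not in ("is_admin_only", "share_instance_id")
}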
@ -0,0 +1,94 @@
|
||||
# Copyright 2015 Mirantis Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from tempest import config
|
||||
from tempest import test
|
||||
from tempest_lib import exceptions as lib_exc
|
||||
|
||||
from manila_tempest_tests import clients_share as clients
|
||||
from manila_tempest_tests.tests.api import base
|
||||
|
||||
CONF = config.CONF
|
||||
|
||||
|
||||
@base.skip_if_microversion_not_supported("2.9")
|
||||
class ExportLocationsNegativeTest(base.BaseSharesAdminTest):
|
||||
|
||||
@classmethod
|
||||
def resource_setup(cls):
|
||||
super(ExportLocationsNegativeTest, cls).resource_setup()
|
||||
cls.admin_client = cls.shares_v2_client
|
||||
cls.member_client = clients.Manager().shares_v2_client
|
||||
cls.share = cls.create_share()
|
||||
cls.share = cls.shares_v2_client.get_share(cls.share['id'])
|
||||
cls.share_instances = cls.shares_v2_client.get_instances_of_share(
|
||||
cls.share['id'])
|
||||
|
||||
@test.attr(type=["gate", "negative"])
|
||||
def test_get_export_locations_by_inexistent_share(self):
|
||||
self.assertRaises(
|
||||
lib_exc.NotFound,
|
||||
self.admin_client.list_share_export_locations,
|
||||
"fake-inexistent-share-id",
|
||||
)
|
||||
|
||||
@test.attr(type=["gate", "negative"])
|
||||
def test_get_inexistent_share_export_location(self):
|
||||
self.assertRaises(
|
||||
lib_exc.NotFound,
|
||||
self.admin_client.get_share_export_location,
|
||||
self.share['id'],
|
||||
"fake-inexistent-share-instance-id",
|
||||
)
|
||||
|
||||
@test.attr(type=["gate", "negative"])
|
||||
def test_get_export_locations_by_inexistent_share_instance(self):
|
||||
self.assertRaises(
|
||||
lib_exc.NotFound,
|
||||
self.admin_client.list_share_instance_export_locations,
|
||||
"fake-inexistent-share-instance-id",
|
||||
)
|
||||
|
||||
@test.attr(type=["gate", "negative"])
|
||||
def test_get_inexistent_share_instance_export_location(self):
|
||||
for share_instance in self.share_instances:
|
||||
self.assertRaises(
|
||||
lib_exc.NotFound,
|
||||
self.admin_client.get_share_instance_export_location,
|
||||
share_instance['id'],
|
||||
"fake-inexistent-share-instance-id",
|
||||
)
|
||||
|
||||
@test.attr(type=["gate", "negative"])
|
||||
def test_list_share_instance_export_locations_by_member(self):
|
||||
for share_instance in self.share_instances:
|
||||
self.assertRaises(
|
||||
lib_exc.Forbidden,
|
||||
self.member_client.list_share_instance_export_locations,
|
||||
"fake-inexistent-share-instance-id",
|
||||
)
|
||||
|
||||
@test.attr(type=["gate", "negative"])
|
||||
def test_get_share_instance_export_location_by_member(self):
|
||||
for share_instance in self.share_instances:
|
||||
export_locations = (
|
||||
self.admin_client.list_share_instance_export_locations(
|
||||
share_instance['id']))
|
||||
for el in export_locations:
|
||||
self.assertRaises(
|
||||
lib_exc.Forbidden,
|
||||
self.member_client.get_share_instance_export_location,
|
||||
share_instance['id'], el['uuid'],
|
||||
)
|
@ -17,6 +17,7 @@ from tempest import config
|
||||
from tempest import test
|
||||
|
||||
from manila_tempest_tests.tests.api import base
|
||||
from manila_tempest_tests import utils
|
||||
|
||||
CONF = config.CONF
|
||||
|
||||
@ -58,21 +59,31 @@ class ShareInstancesTest(base.BaseSharesAdminTest):
|
||||
msg = 'Share instance for share %s was not found.' % self.share['id']
|
||||
self.assertIn(self.share['id'], share_ids, msg)
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
def test_get_share_instance_v2_3(self):
|
||||
def _get_share_instance(self, version):
|
||||
"""Test that we get the proper keys back for the instance."""
|
||||
share_instances = self.shares_v2_client.get_instances_of_share(
|
||||
self.share['id'], version='2.3'
|
||||
self.share['id'], version=version,
|
||||
)
|
||||
si = self.shares_v2_client.get_share_instance(share_instances[0]['id'],
|
||||
version='2.3')
|
||||
si = self.shares_v2_client.get_share_instance(
|
||||
share_instances[0]['id'], version=version)
|
||||
|
||||
expected_keys = ['host', 'share_id', 'id', 'share_network_id',
|
||||
'status', 'availability_zone', 'share_server_id',
|
||||
'export_locations', 'export_location', 'created_at']
|
||||
actual_keys = si.keys()
|
||||
self.assertEqual(sorted(expected_keys), sorted(actual_keys),
|
||||
expected_keys = [
|
||||
'host', 'share_id', 'id', 'share_network_id', 'status',
|
||||
'availability_zone', 'share_server_id', 'created_at',
|
||||
]
|
||||
if utils.is_microversion_lt(version, '2.9'):
|
||||
expected_keys.extend(["export_location", "export_locations"])
|
||||
expected_keys = sorted(expected_keys)
|
||||
actual_keys = sorted(si.keys())
|
||||
self.assertEqual(expected_keys, actual_keys,
|
||||
'Share instance %s returned incorrect keys; '
|
||||
'expected %s, got %s.' % (si['id'],
|
||||
sorted(expected_keys),
|
||||
sorted(actual_keys)))
|
||||
'expected %s, got %s.' % (
|
||||
si['id'], expected_keys, actual_keys))
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
def test_get_share_instance_v2_3(self):
|
||||
self._get_share_instance('2.3')
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
def test_get_share_instance_v2_9(self):
|
||||
self._get_share_instance('2.9')
|
||||
|
@ -338,7 +338,8 @@ class BaseSharesTest(test.BaseTestCase):
|
||||
def migrate_share(cls, share_id, dest_host, client=None, **kwargs):
|
||||
client = client or cls.shares_v2_client
|
||||
client.migrate_share(share_id, dest_host, **kwargs)
|
||||
share = client.wait_for_migration_completed(share_id, dest_host)
|
||||
share = client.wait_for_migration_completed(
|
||||
share_id, dest_host, version=kwargs.get('version'))
|
||||
return share
|
||||
|
||||
@classmethod
|
||||
|
@ -19,6 +19,7 @@ from tempest_lib import exceptions as lib_exc # noqa
|
||||
import testtools # noqa
|
||||
|
||||
from manila_tempest_tests.tests.api import base
|
||||
from manila_tempest_tests import utils
|
||||
|
||||
CONF = config.CONF
|
||||
|
||||
@ -40,7 +41,7 @@ class SharesNFSTest(base.BaseSharesTest):
|
||||
|
||||
share = self.create_share(self.protocol)
|
||||
detailed_elements = {'name', 'id', 'availability_zone',
|
||||
'description', 'export_location', 'project_id',
|
||||
'description', 'project_id',
|
||||
'host', 'created_at', 'share_proto', 'metadata',
|
||||
'size', 'snapshot_id', 'share_network_id',
|
||||
'status', 'share_type', 'volume_type', 'links',
|
||||
@ -57,6 +58,7 @@ class SharesNFSTest(base.BaseSharesTest):
|
||||
|
||||
# Get share using v 2.1 - we expect key 'snapshot_support' to be absent
|
||||
share_get = self.shares_v2_client.get_share(share['id'], version='2.1')
|
||||
detailed_elements.add('export_location')
|
||||
self.assertTrue(detailed_elements.issubset(share_get.keys()), msg)
|
||||
|
||||
# Get share using v 2.2 - we expect key 'snapshot_support' to exist
|
||||
@ -64,6 +66,14 @@ class SharesNFSTest(base.BaseSharesTest):
|
||||
detailed_elements.add('snapshot_support')
|
||||
self.assertTrue(detailed_elements.issubset(share_get.keys()), msg)
|
||||
|
||||
if utils.is_microversion_supported('2.9'):
|
||||
# Get share using v 2.9 - key 'export_location' is expected
|
||||
# to be absent
|
||||
share_get = self.shares_v2_client.get_share(
|
||||
share['id'], version='2.9')
|
||||
detailed_elements.remove('export_location')
|
||||
self.assertTrue(detailed_elements.issubset(share_get.keys()), msg)
|
||||
|
||||
# Delete share
|
||||
self.shares_v2_client.delete_share(share['id'])
|
||||
self.shares_v2_client.wait_for_resource_deletion(share_id=share['id'])
|
||||
|
@ -82,11 +82,12 @@ class SharesActionsTest(base.BaseSharesTest):
|
||||
# verify keys
|
||||
expected_keys = [
|
||||
"status", "description", "links", "availability_zone",
|
||||
"created_at", "export_location", "project_id",
|
||||
"export_locations", "volume_type", "share_proto", "name",
|
||||
"created_at", "project_id", "volume_type", "share_proto", "name",
|
||||
"snapshot_id", "id", "size", "share_network_id", "metadata",
|
||||
"host", "snapshot_id", "is_public",
|
||||
]
|
||||
if utils.is_microversion_lt(version, '2.9'):
|
||||
expected_keys.extend(["export_location", "export_locations"])
|
||||
if utils.is_microversion_ge(version, '2.2'):
|
||||
expected_keys.append("snapshot_support")
|
||||
if utils.is_microversion_ge(version, '2.4'):
|
||||
@ -130,11 +131,16 @@ class SharesActionsTest(base.BaseSharesTest):
|
||||
def test_get_share_with_share_type_name_key(self):
|
||||
self._get_share('2.6')
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
@utils.skip_if_microversion_not_supported('2.9')
|
||||
def test_get_share_export_locations_removed(self):
|
||||
self._get_share('2.9')
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
def test_list_shares(self):
|
||||
|
||||
# list shares
|
||||
shares = self.shares_client.list_shares()
|
||||
shares = self.shares_v2_client.list_shares()
|
||||
|
||||
# verify keys
|
||||
keys = ["name", "id", "links"]
|
||||
@ -155,11 +161,12 @@ class SharesActionsTest(base.BaseSharesTest):
|
||||
# verify keys
|
||||
keys = [
|
||||
"status", "description", "links", "availability_zone",
|
||||
"created_at", "export_location", "project_id",
|
||||
"export_locations", "volume_type", "share_proto", "name",
|
||||
"created_at", "project_id", "volume_type", "share_proto", "name",
|
||||
"snapshot_id", "id", "size", "share_network_id", "metadata",
|
||||
"host", "snapshot_id", "is_public", "share_type",
|
||||
]
|
||||
if utils.is_microversion_lt(version, '2.9'):
|
||||
keys.extend(["export_location", "export_locations"])
|
||||
if utils.is_microversion_ge(version, '2.2'):
|
||||
keys.append("snapshot_support")
|
||||
if utils.is_microversion_ge(version, '2.4'):
|
||||
@ -194,6 +201,11 @@ class SharesActionsTest(base.BaseSharesTest):
|
||||
def test_list_shares_with_detail_share_type_name_key(self):
|
||||
self._list_shares_with_detail('2.6')
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
@utils.skip_if_microversion_not_supported('2.9')
|
||||
def test_list_shares_with_detail_export_locations_removed(self):
|
||||
self._list_shares_with_detail('2.9')
|
||||
|
||||
@test.attr(type=["gate", ])
|
||||
def test_list_shares_with_detail_filter_by_metadata(self):
|
||||
filters = {'metadata': self.metadata}
|
||||
|
@ -38,6 +38,7 @@ class ShareScenarioTest(manager.NetworkScenarioTest):
|
||||
|
||||
# Manila clients
|
||||
cls.shares_client = clients_share.Manager().shares_client
|
||||
cls.shares_v2_client = clients_share.Manager().shares_v2_client
|
||||
cls.shares_admin_client = clients_share.AdminManager().shares_client
|
||||
cls.shares_admin_v2_client = (
|
||||
clients_share.AdminManager().shares_v2_client)
|
||||
|
@ -20,6 +20,7 @@ from tempest_lib.common.utils import data_utils
|
||||
from tempest_lib import exceptions
|
||||
|
||||
from manila_tempest_tests.tests.scenario import manager_share as manager
|
||||
from manila_tempest_tests import utils
|
||||
|
||||
CONF = config.CONF
|
||||
|
||||
@ -190,6 +191,9 @@ class ShareBasicOpsBase(manager.ShareScenarioTest):
|
||||
instance1 = self.boot_instance()
|
||||
self.allow_access_ip(self.share['id'], instance=instance1)
|
||||
ssh_client_inst1 = self.init_ssh(instance1)
|
||||
|
||||
# TODO(vponomaryov): use separate API for getting export location for
|
||||
# share when "v2" client is used.
|
||||
first_location = self.share['export_locations'][0]
|
||||
self.mount_share(first_location, ssh_client_inst1)
|
||||
self.addCleanup(self.umount_share,
|
||||
@ -235,12 +239,13 @@ class ShareBasicOpsBase(manager.ShareScenarioTest):
|
||||
|
||||
dest_pool = dest_pool['name']
|
||||
|
||||
old_export_location = share['export_locations'][0]
|
||||
|
||||
instance1 = self.boot_instance()
|
||||
self.allow_access_ip(self.share['id'], instance=instance1,
|
||||
cleanup=False)
|
||||
ssh_client = self.init_ssh(instance1)
|
||||
|
||||
# TODO(vponomaryov): use separate API for getting export location for
|
||||
# share when "v2" client is used.
|
||||
first_location = self.share['export_locations'][0]
|
||||
self.mount_share(first_location, ssh_client)
|
||||
|
||||
@ -266,12 +271,19 @@ class ShareBasicOpsBase(manager.ShareScenarioTest):
|
||||
self.umount_share(ssh_client)
|
||||
|
||||
share = self.migrate_share(share['id'], dest_pool)
|
||||
if utils.is_microversion_supported("2.9"):
|
||||
second_location = (
|
||||
self.shares_v2_client.list_share_export_locations(
|
||||
share['id'])[0]['path'])
|
||||
else:
|
||||
# NOTE(vponomaryov): following approach is valid for picking up
|
||||
# export location only using microversions lower than '2.9'.
|
||||
second_location = share['export_locations'][0]
|
||||
|
||||
self.assertEqual(dest_pool, share['host'])
|
||||
self.assertNotEqual(old_export_location, share['export_locations'][0])
|
||||
self.assertNotEqual(first_location, second_location)
|
||||
self.assertEqual('migration_success', share['task_state'])
|
||||
|
||||
second_location = share['export_locations'][0]
|
||||
self.mount_share(second_location, ssh_client)
|
||||
|
||||
output = ssh_client.exec_command("ls -lRA --ignore=lost+found /mnt")
|
||||
|
@ -0,0 +1,6 @@
|
||||
---
|
||||
features:
|
||||
- Added APIs for listing export locations per share and share instances.
|
||||
deprecations:
|
||||
- Removed 'export_location' and 'export_locations' attributes from share
|
||||
and share instance views starting with microversion '2.9'.
|
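A brief sketch of the caller-side pattern this change implies, mirroring the scenario test update above; "shares_v2_client", "share", and the "utils" helper are assumed to be available as in the tempest code in this patch:

if utils.is_microversion_supported("2.9"):
    # 2.9 and newer: export locations come from the dedicated API.
    path = shares_v2_client.list_share_export_locations(
        share["id"])[0]["path"]
else:
    # Older microversions: export locations are still embedded in the
    # share view.
    path = share["export_locations"][0]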