Allow shares to have multiple export locations
Many storage controllers have more than one "export location" that clients can mount to access a given share. Manila only allows storing one of these today, which is an unnecessary limitation.

Partially implements bp multiple-export-locations

Change-Id: Ib9d497e76762d4862bc5c87e6186e1d096b1d2a6
parent b37f897584
commit 1e2b07294e
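For readers skimming the diff, a minimal sketch of the behaviour this change enables. The paths and function names below are made up for illustration; only the list-versus-string shape and the backward-compatible 'export_location' field come from the commit:

    def fake_create_share(context, share, share_server=None):
        # Hypothetical driver method: every returned path is mountable by clients.
        return ['10.0.0.1:/shares/share-42', '10.0.0.2:/shares/share-42']


    def build_view(export_locations):
        # Mirrors the view behaviour added in this commit: the full list is exposed
        # as 'export_locations', while 'export_location' keeps the first entry for
        # backward compatibility.
        return {
            'export_locations': export_locations,
            'export_location': export_locations[0] if export_locations else None,
        }


    print(build_view(fake_create_share(None, None)))
    # {'export_locations': ['10.0.0.1:/shares/share-42', '10.0.0.2:/shares/share-42'],
    #  'export_location': '10.0.0.1:/shares/share-42'}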
@@ -37,9 +37,12 @@ LOG = log.getLogger(__name__)
 def make_share(elem):
+    # NOTE(u_glide):
+    # export_location is backward-compatibility attribute, which contains first
+    # export location from export_locations list.
     attrs = ['id', 'size', 'availability_zone', 'status', 'name',
              'description', 'share_proto', 'export_location', 'links',
-             'snapshot_id', 'created_at', 'metadata']
+             'snapshot_id', 'created_at', 'metadata', 'export_locations']
     for attr in attrs:
         elem.set(attr)
@@ -47,6 +47,12 @@ class ViewBuilder(common.ViewBuilder):
             metadata = dict((item['key'], item['value']) for item in metadata)
         else:
             metadata = {}
+
+        export_locations = share.get('export_locations', [])
+
+        if export_locations:
+            export_locations = [item['path'] for item in export_locations]
+
         if share['share_type_id'] and share.get('share_type'):
             share_type = share['share_type']['name']
         else:
@@ -71,6 +77,7 @@ class ViewBuilder(common.ViewBuilder):
             'volume_type': share_type,
             'links': self._get_links(request, share['id']),
             'is_public': share.get('is_public'),
+            'export_locations': export_locations,
         }
         if context.is_admin:
             share_dict['share_server_id'] = share.get('share_server_id')
@@ -489,6 +489,22 @@ def share_metadata_update(context, share, metadata, delete):
 
 
 ###################
+
+
+def share_export_locations_get(context, share_id):
+    """Get all export locations of share."""
+    return IMPL.share_export_locations_get(context, share_id)
+
+
+def share_export_locations_update(context, share_id, export_locations,
+                                  delete=True):
+    """Update export locations of share."""
+    return IMPL.share_export_locations_update(context, share_id,
+                                               export_locations, delete)
+
+
+####################
 
 
 def share_network_create(context, values):
     """Create a share network DB record."""
     return IMPL.share_network_create(context, values)
@@ -0,0 +1,81 @@
+# Copyright 2015 Mirantis Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Add share_export_locations table
+
+Revision ID: 56cdbe267881
+Revises: 17115072e1c3
+Create Date: 2015-02-27 14:06:30.464315
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '56cdbe267881'
+down_revision = '30cb96d995fa'
+
+from alembic import op
+import sqlalchemy as sql
+
+
+def upgrade():
+    op.create_table(
+        'share_export_locations',
+        sql.Column('id', sql.Integer, primary_key=True, nullable=False),
+        sql.Column('created_at', sql.DateTime),
+        sql.Column('updated_at', sql.DateTime),
+        sql.Column('deleted_at', sql.DateTime),
+        sql.Column('deleted', sql.Integer, default=0),
+        sql.Column('path', sql.String(2000)),
+        sql.Column('share_id', sql.String(36),
+                   sql.ForeignKey('shares.id', name="sel_id_fk")),
+        mysql_engine='InnoDB',
+        mysql_charset='utf8'
+    )
+
+    op.execute("INSERT INTO share_export_locations "
+               "(created_at, deleted, path, share_id) "
+               "SELECT created_at, 0, export_location, id "
+               "FROM shares")
+
+    op.drop_column('shares', 'export_location')
+
+
+def downgrade():
+    """Remove share_export_locations table.
+
+    This method can lead to data loss because only first export_location
+    is saved in shares table.
+    """
+    op.add_column('shares',
+                  sql.Column('export_location', sql.String(255)))
+
+    connection = op.get_bind()
+
+    export_locations = connection.execute(
+        "SELECT share_id, path FROM share_export_locations sel WHERE deleted=0"
+        " AND updated_at = ("
+        "    SELECT MIN(updated_at) FROM share_export_locations sel2 "
+        "    WHERE sel.share_id = sel2.share_id)")
+
+    shares = sql.Table('shares', sql.MetaData(),
+                       autoload=True, autoload_with=connection)
+
+    for location in export_locations:
+        update = shares.update().where(shares.c.id == location['share_id']). \
+            values(export_location=location['path'])
+        connection.execute(update)
+
+    op.drop_table('share_export_locations')
@@ -1654,6 +1654,68 @@ def _share_metadata_get_item(context, share_id, key, session=None):
     return result
 
 
+#################################
+
+
+@require_context
+@require_share_exists
+def share_export_locations_get(context, share_id):
+    rows = _share_export_locations_get(context, share_id)
+
+    return [location['path'] for location in rows]
+
+
+def _share_export_locations_get(context, share_id, session=None):
+    if not session:
+        session = get_session()
+
+    return model_query(context, models.ShareExportLocations,
+                       session=session). \
+        filter_by(share_id=share_id).all()
+
+
+@require_context
+@require_share_exists
+def share_export_locations_update(context, share_id, export_locations, delete):
+    # NOTE(u_glide):
+    # Backward compatibility code for drivers,
+    # which returns single export_location as string
+    if not isinstance(export_locations, list):
+        export_locations = [export_locations]
+
+    session = get_session()
+
+    with session.begin():
+        location_rows = _share_export_locations_get(
+            context, share_id, session=session)
+
+        current_locations = set([l['path'] for l in location_rows])
+
+        new_locations = set(export_locations)
+        add_locations = new_locations.difference(current_locations)
+
+        # Set existing export location to deleted if delete argument is True
+        if delete:
+            delete_locations = current_locations.difference(new_locations)
+
+            for location in location_rows:
+                if location['path'] in delete_locations:
+                    location.update({'deleted': True})
+                    location.save(session=session)
+        else:
+            export_locations = list(current_locations.union(new_locations))
+
+        # Now add new export locations
+        for path in add_locations:
+            location_ref = models.ShareExportLocations()
+            location_ref.update({'path': path, 'share_id': share_id})
+            location_ref.save(session=session)
+
+    return export_locations
+
+
+#################################
+
+
 @require_context
 def security_service_create(context, values):
     if not values.get('id'):
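The update function above is, at heart, set arithmetic on paths. A standalone sketch of the same semantics (illustrative only, not part of the commit; it ignores the DB session and soft-delete bookkeeping):

    def merge_export_locations(current, new, delete=True):
        # With delete=True the stored set becomes exactly the new list;
        # with delete=False existing paths are kept and new ones are added.
        current, new = set(current), set(new)
        return sorted(new) if delete else sorted(current | new)


    assert merge_export_locations(['/old'], ['/new'], delete=True) == ['/new']
    assert merge_export_locations(['/old'], ['/new'], delete=False) == ['/new', '/old']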
@@ -176,6 +176,11 @@ class Share(BASE, ManilaBase):
     def name(self):
         return CONF.share_name_template % self.id
 
+    @property
+    def export_location(self):
+        if len(self.export_locations) > 0:
+            return self.export_locations[0]['path']
+
     id = Column(String(36), primary_key=True)
     deleted = Column(String(36), default='False')
     user_id = Column(String(255))
@@ -191,7 +196,12 @@ class Share(BASE, ManilaBase):
     display_description = Column(String(255))
     snapshot_id = Column(String(36))
     share_proto = Column(String(255))
-    export_location = Column(String(255))
+    export_locations = orm.relationship(
+        "ShareExportLocations",
+        lazy='immediate',
+        primaryjoin='and_('
+                    'Share.id == ShareExportLocations.share_id, '
+                    'ShareExportLocations.deleted == False)')
     share_network_id = Column(String(36), ForeignKey('share_networks.id'),
                               nullable=True)
     share_type_id = Column(String(36), ForeignKey('share_types.id'),
@@ -201,6 +211,15 @@ class Share(BASE, ManilaBase):
     is_public = Column(Boolean, default=False)
 
 
+class ShareExportLocations(BASE, ManilaBase):
+    """Represents export locations of shares."""
+    __tablename__ = 'share_export_locations'
+
+    id = Column(Integer, primary_key=True)
+    share_id = Column(String(36), ForeignKey('shares.id'), nullable=False)
+    path = Column(String(2000))
+
+
 class ShareTypes(BASE, ManilaBase):
     """Represent possible share_types of volumes offered."""
     __tablename__ = "share_types"
@@ -276,23 +276,32 @@ class ShareManager(manager.SchedulerDependentManager):
 
         try:
             if snapshot_ref:
-                export_location = self.driver.create_share_from_snapshot(
+                export_locations = self.driver.create_share_from_snapshot(
                     context, share_ref, snapshot_ref,
                     share_server=share_server)
             else:
-                export_location = self.driver.create_share(
+                export_locations = self.driver.create_share(
                     context, share_ref, share_server=share_server)
-            self.db.share_update(context, share_id,
-                                 {'export_location': export_location})
+            self.db.share_export_locations_update(context, share_id,
+                                                  export_locations)
+
         except Exception as e:
             with excutils.save_and_reraise_exception():
                 LOG.error(_LE("Share %s failed on creation."), share_id)
                 detail_data = getattr(e, 'detail_data', {})
-                if (isinstance(detail_data, dict) and
-                        detail_data.get('export_location')):
-                    self.db.share_update(
-                        context, share_id,
-                        {'export_location': detail_data['export_location']})
+
+                def get_export_location(details):
+                    if not isinstance(details, dict):
+                        return None
+                    return details.get('export_locations',
+                                       details.get('export_location'))
+
+                export_locations = get_export_location(detail_data)
+
+                if export_locations:
+                    self.db.share_export_locations_update(
+                        context, share_id, export_locations)
                 else:
                     LOG.warning(_LW('Share information in exception '
                                     'can not be written to db because it '
@@ -26,6 +26,8 @@ def stub_share(id, **kwargs):
         'id': id,
         'share_proto': 'FAKEPROTO',
         'export_location': 'fake_location',
+        'export_locations': [{'path': 'fake_location'},
+                             {'path': 'fake_location2'}],
         'user_id': 'fakeuser',
         'project_id': 'fakeproject',
         'host': 'fakehost',
@@ -78,6 +78,7 @@ class ShareApiTest(test.TestCase):
             'availability_zone': 'fakeaz',
             'description': 'displaydesc',
             'export_location': 'fake_location',
+            'export_locations': ['fake_location', 'fake_location2'],
             'project_id': 'fakeproject',
             'host': 'fakehost',
             'created_at': datetime.datetime(1, 1, 1, 1, 1, 1),
@@ -550,6 +551,7 @@ class ShareApiTest(test.TestCase):
             'status': 'fakestatus',
             'description': 'displaydesc',
             'export_location': 'fake_location',
+            'export_locations': ['fake_location', 'fake_location2'],
             'availability_zone': 'fakeaz',
             'name': 'displayname',
             'share_proto': 'FAKEPROTO',
@@ -36,7 +36,8 @@ class SQLAlchemyAPIShareTestCase(test.TestCase):
             api.share_get_all_by_host(
                 self.ctxt, 'foo'),
             ignored_keys=['share_type',
-                          'share_type_id'])
+                          'share_type_id',
+                          'export_locations'])
 
     def test_share_filter_all_by_host_with_pools_multiple_hosts(self):
         shares = [[api.share_create(self.ctxt, {'host': value})
@@ -47,4 +48,5 @@ class SQLAlchemyAPIShareTestCase(test.TestCase):
             api.share_get_all_by_host(
                 self.ctxt, 'foo'),
             ignored_keys=['share_type',
-                          'share_type_id'])
+                          'share_type_id',
+                          'export_locations'])
@@ -47,11 +47,11 @@ class FakeShareDriver(driver.ShareDriver):
         pass
 
     def create_share(self, context, share, share_server=None):
-        pass
+        return ['/fake/path', '/fake/path2']
 
     def create_share_from_snapshot(self, context, share, snapshot,
                                    share_server=None):
-        pass
+        return ['/fake/path', '/fake/path2']
 
     def delete_share(self, context, share, share_server=None):
         pass
@@ -389,6 +389,8 @@ class ShareManagerTestCase(test.TestCase):
 
         shr = db.share_get(self.context, share_id)
         self.assertEqual(shr['status'], 'available')
+        self.assertTrue(len(shr['export_location']) > 0)
+        self.assertEqual(2, len(shr['export_locations']))
 
     def test_create_delete_share_snapshot(self):
         """Test share's snapshot can be created and deleted."""
@@ -605,15 +607,17 @@ class ShareManagerTestCase(test.TestCase):
         shr = db.share_get(self.context, share_id)
         self.assertEqual(shr['status'], 'available')
         self.assertEqual(shr['share_server_id'], share_srv['id'])
+        self.assertTrue(len(shr['export_location']) > 0)
+        self.assertEqual(1, len(shr['export_locations']))
 
-    def test_create_share_with_error_in_driver(self):
+    @ddt.data('export_location', 'export_locations')
+    def test_create_share_with_error_in_driver(self, details_key):
         """Test db updates if share creation fails in driver."""
         share = self._create_share()
         share_id = share['id']
         some_data = 'fake_location'
         self.share_manager.driver = mock.Mock()
-        e = exception.ManilaException(
-            detail_data={'export_location': some_data})
+        e = exception.ManilaException(detail_data={details_key: some_data})
         self.share_manager.driver.create_share.side_effect = e
         self.assertRaises(
             exception.ManilaException,