Browse Source

Merge "Pool-aware Scheduler Support"

Jenkins 4 years ago
parent
commit
79857b61c5

+ 3
- 1
etc/manila/policy.json View File

@@ -60,5 +60,7 @@
60 60
     "share_network:show": [["rule:default"]],
61 61
     "share_network:add_security_service": [["rule:default"]],
62 62
     "share_network:remove_security_service": [["rule:default"]],
63
-    "share_network:get_all_share_networks": [["rule:admin_api"]]
63
+    "share_network:get_all_share_networks": [["rule:admin_api"]],
64
+
65
+    "scheduler_extension:scheduler_stats:get_pools" : "rule:admin_api"
64 66
 }

+ 63
- 0
manila/api/contrib/scheduler_stats.py View File

@@ -0,0 +1,63 @@
1
+# Copyright (c) 2014 eBay Inc.
2
+# Copyright (c) 2015 Rushil Chugh
3
+# All Rights Reserved.
4
+#
5
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
6
+#    not use this file except in compliance with the License. You may obtain
7
+#    a copy of the License at
8
+#
9
+#         http://www.apache.org/licenses/LICENSE-2.0
10
+#
11
+#    Unless required by applicable law or agreed to in writing, software
12
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
13
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
14
+#    License for the specific language governing permissions and limitations
15
+#    under the License.
16
+
17
+"""The Scheduler Stats extension"""
18
+
19
+from manila.api import extensions
20
+from manila.api.openstack import wsgi
21
+from manila.api.views import scheduler_stats as scheduler_stats_view
22
+from manila.scheduler import rpcapi
23
+
24
+
25
+def authorize(context, action_name):
26
+    action = 'scheduler_stats:%s' % action_name
27
+    extensions.extension_authorizer('scheduler', action)(context)
28
+
29
+
30
+class SchedulerStatsController(wsgi.Controller):
31
+    """The Scheduler Stats controller for the OpenStack API."""
32
+
33
+    _view_builder_class = scheduler_stats_view.ViewBuilder
34
+
35
+    def __init__(self):
36
+        self.scheduler_api = rpcapi.SchedulerAPI()
37
+        super(SchedulerStatsController, self).__init__()
38
+
39
+    def get_pools(self, req):
40
+        """List all active pools in scheduler."""
41
+        context = req.environ['manila.context']
42
+        authorize(context, 'get_pools')
43
+
44
+        detail = req.params.get('detail', False)
45
+        pools = self.scheduler_api.get_pools(context, filters=None)
46
+
47
+        return self._view_builder.pools(req, pools, detail)
48
+
49
+
50
+class Scheduler_stats(extensions.ExtensionDescriptor):
51
+    """Scheduler stats support."""
52
+
53
+    name = "Scheduler_stats"
54
+    alias = "scheduler-stats"
55
+    updated = "2015-08-01T00:00:00+00:00"
56
+
57
+    def get_resources(self):
58
+        res = extensions.ResourceExtension(
59
+            Scheduler_stats.alias,
60
+            SchedulerStatsController(),
61
+            collection_actions={"get_pools": "GET"})
62
+
63
+        return [res]

+ 46
- 0
manila/api/views/scheduler_stats.py View File

@@ -0,0 +1,46 @@
1
+# Copyright (c) 2014 eBay Inc.
2
+# Copyright (c) 2015 Rushil Chugh
3
+# All Rights Reserved.
4
+#
5
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
6
+#    not use this file except in compliance with the License. You may obtain
7
+#    a copy of the License at
8
+#
9
+#         http://www.apache.org/licenses/LICENSE-2.0
10
+#
11
+#    Unless required by applicable law or agreed to in writing, software
12
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
13
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
14
+#    License for the specific language governing permissions and limitations
15
+#    under the License.
16
+
17
+from manila.api import common
18
+
19
+
20
+class ViewBuilder(common.ViewBuilder):
21
+    """Model scheduler-stats API responses as a python dictionary."""
22
+
23
+    _collection_name = "scheduler-stats"
24
+
25
+    def summary(self, request, pool):
26
+        """Summary view of a single pool."""
27
+        return {
28
+            'pool': {
29
+                'name': pool.get('name'),
30
+            }
31
+        }
32
+
33
+    def detail(self, request, pool):
34
+        """Detailed view of a single pool."""
35
+        return {
36
+            'pool': {
37
+                'name': pool.get('name'),
38
+                'capabilities': pool.get('capabilities'),
39
+            }
40
+        }
41
+
42
+    def pools(self, request, pools, detail):
43
+        """Summary view of a list of pools seen by scheduler."""
44
+        pdict = self.detail if detail else self.summary
45
+
46
+        return {"pools": [pdict(request, pool)['pool'] for pool in pools]}

+ 8
- 2
manila/db/sqlalchemy/api.py View File

@@ -1193,8 +1193,13 @@ def _share_get_all_with_filters(context, project_id=None, share_server_id=None,
1193 1193
         query = query.filter_by(project_id=project_id)
1194 1194
     if share_server_id:
1195 1195
         query = query.filter_by(share_server_id=share_server_id)
1196
-    if host:
1197
-        query = query.filter_by(host=host)
1196
+    if host and isinstance(host, six.string_types):
1197
+        session = get_session()
1198
+        with session.begin():
1199
+            host_attr = getattr(models.Share, 'host')
1200
+            conditions = [host_attr == host,
1201
+                          host_attr.op('LIKE')(host + '#%')]
1202
+            query = query.filter(or_(*conditions))
1198 1203
 
1199 1204
     # Apply filters
1200 1205
     if not filters:
@@ -1244,6 +1249,7 @@ def share_get_all(context, filters=None, sort_key=None, sort_dir=None):
1244 1249
 @require_admin_context
1245 1250
 def share_get_all_by_host(context, host, filters=None,
1246 1251
                           sort_key=None, sort_dir=None):
1252
+    """Retrieves all shares hosted on a host."""
1247 1253
     query = _share_get_all_with_filters(
1248 1254
         context, host=host, filters=filters,
1249 1255
         sort_key=sort_key, sort_dir=sort_dir,

+ 5
- 0
manila/scheduler/driver.py View File

@@ -88,3 +88,8 @@ class Scheduler(object):
88 88
     def schedule_create_share(self, context, request_spec, filter_properties):
89 89
         """Must override schedule method for scheduler to work."""
90 90
         raise NotImplementedError(_("Must implement schedule_create_share"))
91
+
92
+    def get_pools(self, context, filters):
93
+        """Must override get_pools method for scheduler to work."""
94
+        raise NotImplementedError(_(
95
+            "Must implement get_pools"))

+ 4
- 0
manila/scheduler/filter_scheduler.py View File

@@ -49,6 +49,10 @@ class FilterScheduler(driver.Scheduler):
49 49
         """Fetch options dictionary. Broken out for testing."""
50 50
         return self.options.get_configuration()
51 51
 
52
+    def get_pools(self, context, filters):
53
+        # TODO(zhiteng) Add filters support
54
+        return self.host_manager.get_pools(context)
55
+
52 56
     def _post_select_populate_filter_properties(self, filter_properties,
53 57
                                                 host_state):
54 58
         """Add additional information to filter properties.

+ 205
- 23
manila/scheduler/host_manager.py View File

@@ -1,4 +1,5 @@
1 1
 # Copyright (c) 2011 OpenStack, LLC.
2
+# Copyright (c) 2015 Rushil Chugh
2 3
 # All Rights Reserved.
3 4
 #
4 5
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -29,6 +30,7 @@ from manila.i18n import _LI
29 30
 from manila.openstack.common import log as logging
30 31
 from manila.openstack.common.scheduler import filters
31 32
 from manila.openstack.common.scheduler import weights
33
+from manila.share import utils as share_utils
32 34
 from manila import utils
33 35
 
34 36
 host_manager_opts = [
@@ -89,6 +91,8 @@ class HostState(object):
89 91
     """Mutable and immutable information tracked for a host."""
90 92
 
91 93
     def __init__(self, host, capabilities=None, service=None):
94
+        self.capabilities = None
95
+        self.service = None
92 96
         self.host = host
93 97
         self.update_capabilities(capabilities, service)
94 98
 
@@ -103,6 +107,8 @@ class HostState(object):
103 107
         self.free_capacity_gb = None
104 108
         self.reserved_percentage = 0
105 109
 
110
+        # PoolState for all pools
111
+        self.pools = {}
106 112
         self.updated = None
107 113
 
108 114
     def update_capabilities(self, capabilities=None, service=None):
@@ -115,23 +121,142 @@ class HostState(object):
115 121
             service = {}
116 122
         self.service = ReadOnlyDict(service)
117 123
 
118
-    def update_from_share_capability(self, capability):
119
-        """Update information about a host from its volume_node info."""
124
+    def update_from_share_capability(self, capability, service=None):
125
+        """Update information about a host from its share_node info.
126
+
127
+        'capability' is the status info reported by share backend, a typical
128
+        capability looks like this:
129
+
130
+        capability = {
131
+            'share_backend_name': 'Local NFS',    #\
132
+            'vendor_name': 'OpenStack',           #  backend level
133
+            'driver_version': '1.0',              #  mandatory/fixed
134
+            'storage_protocol': 'NFS',            #/ stats&capabilities
135
+
136
+            'active_shares': 10,                  #\
137
+            'IOPS_provisioned': 30000,            #  optional custom
138
+            'fancy_capability_1': 'eat',          #  stats & capabilities
139
+            'fancy_capability_2': 'drink',        #/
140
+
141
+            'pools': [
142
+                {'pool_name': '1st pool',         #\
143
+                 'total_capacity_gb': 500,        #  mandatory stats for
144
+                 'free_capacity_gb': 230,         #  pools
145
+                 'allocated_capacity_gb': 270,    # |
146
+                 'QoS_support': 'False',          # |
147
+                 'reserved_percentage': 0,        #/
148
+
149
+                 'dying_disks': 100,              #\
150
+                 'super_hero_1': 'spider-man',    #  optional custom
151
+                 'super_hero_2': 'flash',         #  stats & capabilities
152
+                 'super_hero_3': 'neoncat'        #/
153
+                 },
154
+                {'pool_name': '2nd pool',
155
+                 'total_capacity_gb': 1024,
156
+                 'free_capacity_gb': 1024,
157
+                 'allocated_capacity_gb': 0,
158
+                 'QoS_support': 'False',
159
+                 'reserved_percentage': 0,
160
+
161
+                 'dying_disks': 200,
162
+                 'super_hero_1': 'superman',
163
+                 'super_hero_2': ' ',
164
+                 'super_hero_2': 'Hulk',
165
+                 }
166
+            ]
167
+        }
168
+        """
169
+        self.update_capabilities(capability, service)
170
+
120 171
         if capability:
121 172
             if self.updated and self.updated > capability['timestamp']:
122 173
                 return
123 174
 
124
-            self.share_backend = capability.get('share_backend_name', None)
125
-            self.vendor_name = capability.get('vendor_name', None)
126
-            self.driver_version = capability.get('driver_version', None)
127
-            self.storage_protocol = capability.get('storage_protocol', None)
128
-            self.QoS_support = capability.get('QoS_support', False)
175
+            # Update backend level info
176
+            self.update_backend(capability)
129 177
 
130
-            self.total_capacity_gb = capability['total_capacity_gb']
131
-            self.free_capacity_gb = capability['free_capacity_gb']
132
-            self.reserved_percentage = capability['reserved_percentage']
178
+            # Update pool level info
179
+            self.update_pools(capability, service)
133 180
 
134
-            self.updated = capability['timestamp']
181
+    def update_pools(self, capability, service):
182
+        """Update storage pools information from backend reported info."""
183
+        if not capability:
184
+            return
185
+
186
+        pools = capability.get('pools', None)
187
+        active_pools = set()
188
+        if pools and isinstance(pools, list):
189
+            # Update all pools stats according to information from list
190
+            # of pools in share capacity
191
+            for pool_cap in pools:
192
+                pool_name = pool_cap['pool_name']
193
+                self._append_backend_info(pool_cap)
194
+                cur_pool = self.pools.get(pool_name, None)
195
+                if not cur_pool:
196
+                    # Add new pool
197
+                    cur_pool = PoolState(self.host, pool_cap, pool_name)
198
+                    self.pools[pool_name] = cur_pool
199
+                cur_pool.update_from_share_capability(pool_cap, service)
200
+
201
+                active_pools.add(pool_name)
202
+        elif pools is None:
203
+            # To handle legacy driver that doesn't report pool
204
+            # information in the capability, we have to prepare
205
+            # a pool from backend level info, or to update the one
206
+            # we created in self.pools.
207
+            pool_name = self.share_backend_name
208
+            if pool_name is None:
209
+                # To get DEFAULT_POOL_NAME
210
+                pool_name = share_utils.extract_host(self.host, 'pool', True)
211
+
212
+            if len(self.pools) == 0:
213
+                # No pool was there
214
+                single_pool = PoolState(self.host, capability, pool_name)
215
+                self._append_backend_info(capability)
216
+                self.pools[pool_name] = single_pool
217
+            else:
218
+                # This is a update from legacy driver
219
+                try:
220
+                    single_pool = self.pools[pool_name]
221
+                except KeyError:
222
+                    single_pool = PoolState(self.host, capability, pool_name)
223
+                    self._append_backend_info(capability)
224
+                    self.pools[pool_name] = single_pool
225
+
226
+            single_pool.update_from_share_capability(capability, service)
227
+            active_pools.add(pool_name)
228
+
229
+        # Remove non-active pools from self.pools
230
+        nonactive_pools = set(self.pools.keys()) - active_pools
231
+        for pool in nonactive_pools:
232
+            LOG.debug("Removing non-active pool %(pool)s @ %(host)s "
233
+                      "from scheduler cache.",
234
+                      {'pool': pool, 'host': self.host})
235
+            del self.pools[pool]
236
+
237
+    def _append_backend_info(self, pool_cap):
238
+        # Fill backend level info to pool if needed.
239
+        if not pool_cap.get('share_backend_name'):
240
+            pool_cap['share_backend_name'] = self.share_backend_name
241
+
242
+        if not pool_cap.get('storage_protocol'):
243
+            pool_cap['storage_protocol'] = self.storage_protocol
244
+
245
+        if not pool_cap.get('vendor_name'):
246
+            pool_cap['vendor_name'] = self.vendor_name
247
+
248
+        if not pool_cap.get('driver_version'):
249
+            pool_cap['driver_version'] = self.driver_version
250
+
251
+        if not pool_cap.get('timestamp'):
252
+            pool_cap['timestamp'] = self.updated
253
+
254
+    def update_backend(self, capability):
255
+        self.share_backend_name = capability.get('share_backend_name')
256
+        self.vendor_name = capability.get('vendor_name')
257
+        self.driver_version = capability.get('driver_version')
258
+        self.storage_protocol = capability.get('storage_protocol')
259
+        self.updated = capability['timestamp']
135 260
 
136 261
     def consume_from_share(self, share):
137 262
         """Incrementally update host state from an share."""
@@ -146,6 +271,41 @@ class HostState(object):
146 271
             self.free_capacity_gb -= share_gb
147 272
         self.updated = timeutils.utcnow()
148 273
 
274
+    def __repr__(self):
275
+        return ("host: '%(host)s', free_capacity_gb: %(free)s, "
276
+                "pools: %(pools)s" % {'host': self.host,
277
+                                      'free': self.free_capacity_gb,
278
+                                      'pools': self.pools}
279
+                )
280
+
281
+
282
+class PoolState(HostState):
283
+    def __init__(self, host, capabilities, pool_name):
284
+        new_host = share_utils.append_host(host, pool_name)
285
+        super(PoolState, self).__init__(new_host, capabilities)
286
+        self.pool_name = pool_name
287
+        # No pools in pool
288
+        self.pools = None
289
+
290
+    def update_from_share_capability(self, capability, service=None):
291
+        """Update information about a pool from its share_node info."""
292
+        self.update_capabilities(capability, service)
293
+        if capability:
294
+            if self.updated and self.updated > capability['timestamp']:
295
+                return
296
+            self.update_backend(capability)
297
+
298
+            self.total_capacity_gb = capability['total_capacity_gb']
299
+            self.free_capacity_gb = capability['free_capacity_gb']
300
+            self.allocated_capacity_gb = capability.get(
301
+                'allocated_capacity_gb', 0)
302
+            self.QoS_support = capability.get('QoS_support', False)
303
+            self.reserved_percentage = capability['reserved_percentage']
304
+
305
+    def update_pools(self, capability):
306
+        # Do nothing, since we don't have pools within pool, yet
307
+        pass
308
+
149 309
 
150 310
 class HostManager(object):
151 311
     """Base HostManager class."""
@@ -243,14 +403,16 @@ class HostManager(object):
243 403
                       {'service_name': service_name, 'host': host})
244 404
             return
245 405
 
246
-        LOG.debug("Received %(service_name)s service update from "
247
-                  "%(host)s.", {"service_name": service_name, "host": host})
248
-
249 406
         # Copy the capabilities, so we don't modify the original dict
250 407
         capab_copy = dict(capabilities)
251 408
         capab_copy["timestamp"] = timeutils.utcnow()  # Reported time
252 409
         self.service_states[host] = capab_copy
253 410
 
411
+        LOG.debug("Received %(service_name)s service update from "
412
+                  "%(host)s: %(cap)s" %
413
+                  {'service_name': service_name, 'host': host,
414
+                   'cap': capabilities})
415
+
254 416
     def get_all_host_states_share(self, context):
255 417
         """Get all hosts and their states.
256 418
 
@@ -263,6 +425,7 @@ class HostManager(object):
263 425
         """
264 426
 
265 427
         # Get resource usage across the available share nodes:
428
+        all_pools = {}
266 429
         topic = CONF.share_topic
267 430
         share_services = db.service_get_all_by_topic(context, topic)
268 431
         for service in share_services:
@@ -274,17 +437,36 @@ class HostManager(object):
274 437
                 continue
275 438
             capabilities = self.service_states.get(host, None)
276 439
             host_state = self.host_state_map.get(host)
277
-            if host_state:
278
-                # copy capabilities to host_state.capabilities
279
-                host_state.update_capabilities(capabilities,
280
-                                               dict(six.iteritems(service)))
281
-            else:
440
+            if not host_state:
282 441
                 host_state = self.host_state_cls(
283 442
                     host,
284 443
                     capabilities=capabilities,
285 444
                     service=dict(six.iteritems(service)))
286 445
                 self.host_state_map[host] = host_state
287
-            # update host_state
288
-            host_state.update_from_share_capability(capabilities)
289
-
290
-        return self.host_state_map.itervalues()
446
+            # Update host_state
447
+            host_state.update_from_share_capability(
448
+                capabilities, service=dict(six.iteritems(service)))
449
+            # Build a pool_state map and return that instead of host_state_map
450
+            state = self.host_state_map[host]
451
+            for key in state.pools:
452
+                pool = state.pools[key]
453
+                # Use host.pool_name to make sure key is unique
454
+                pool_key = '.'.join([host, pool.pool_name])
455
+                all_pools[pool_key] = pool
456
+
457
+        return six.itervalues(all_pools)
458
+
459
+    def get_pools(self, context):
460
+        """Returns a list of dicts of all pools known to the HostManager."""
461
+
462
+        all_pools = []
463
+        for host, state in self.host_state_map.items():
464
+            for key in state.pools:
465
+                pool = state.pools[key]
466
+                # Use host.pool_name to make sure key is unique
467
+                pool_key = share_utils.append_host(host, pool.pool_name)
468
+                new_pool = dict(name=pool_key)
469
+                new_pool.update(dict(capabilities=pool.capabilities))
470
+                all_pools.append(new_pool)
471
+
472
+        return all_pools

+ 9
- 0
manila/scheduler/manager.py View File

@@ -19,6 +19,7 @@
19 19
 Scheduler Service
20 20
 """
21 21
 
22
+from oslo import messaging
22 23
 from oslo_config import cfg
23 24
 from oslo_utils import excutils
24 25
 from oslo_utils import importutils
@@ -46,6 +47,10 @@ CONF.register_opt(scheduler_driver_opt)
46 47
 class SchedulerManager(manager.Manager):
47 48
     """Chooses a host to create shares."""
48 49
 
50
+    RPC_API_VERSION = '1.1'
51
+
52
+    target = messaging.Target(version=RPC_API_VERSION)
53
+
49 54
     def __init__(self, scheduler_driver=None, service_name=None,
50 55
                  *args, **kwargs):
51 56
         if not scheduler_driver:
@@ -88,6 +93,10 @@ class SchedulerManager(manager.Manager):
88 93
                                                        context, ex,
89 94
                                                        request_spec)
90 95
 
96
+    def get_pools(self, context, filters=None):
97
+        """Get active pools from the scheduler's cache."""
98
+        return self.driver.get_pools(context, filters)
99
+
91 100
     def _set_share_error_state_and_notify(self, method, context, ex,
92 101
                                           request_spec):
93 102
         LOG.warning(_LW("Failed to schedule_%(method)s: %(ex)s"),

+ 8
- 2
manila/scheduler/rpcapi.py View File

@@ -31,15 +31,16 @@ class SchedulerAPI(object):
31 31
     API version history:
32 32
 
33 33
         1.0 - Initial version.
34
+        1.1 - Add get_pools method
34 35
     '''
35 36
 
36
-    RPC_API_VERSION = '1.0'
37
+    RPC_API_VERSION = '1.1'
37 38
 
38 39
     def __init__(self):
39 40
         super(SchedulerAPI, self).__init__()
40 41
         target = messaging.Target(topic=CONF.scheduler_topic,
41 42
                                   version=self.RPC_API_VERSION)
42
-        self.client = rpc.get_client(target, version_cap='1.0')
43
+        self.client = rpc.get_client(target, version_cap='1.1')
43 44
 
44 45
     def create_share(self, ctxt, topic, share_id, snapshot_id=None,
45 46
                      request_spec=None, filter_properties=None):
@@ -66,3 +67,8 @@ class SchedulerAPI(object):
66 67
             host=host,
67 68
             capabilities=capabilities,
68 69
         )
70
+
71
+    def get_pools(self, ctxt, filters=None):
72
+        cctxt = self.client.prepare(version='1.1')
73
+        return cctxt.call(ctxt, 'get_pools',
74
+                          filters=filters)

+ 9
- 0
manila/share/driver.py View File

@@ -174,6 +174,9 @@ class ShareDriver(object):
174 174
         """
175 175
         super(ShareDriver, self).__init__()
176 176
         self.configuration = kwargs.get('configuration', None)
177
+        self._stats = {}
178
+
179
+        self.pools = {}
177 180
         if self.configuration:
178 181
             self.configuration.append_config_values(share_opts)
179 182
             network_config_group = (self.configuration.network_config_group or
@@ -243,6 +246,12 @@ class ShareDriver(object):
243 246
         """Is called to remove snapshot."""
244 247
         raise NotImplementedError()
245 248
 
249
+    def get_pool(self, share):
250
+        """Return pool name where the share resides on.
251
+
252
+        :param share: The share hosted by the driver.
253
+        """
254
+
246 255
     def ensure_share(self, context, share, share_server=None):
247 256
         """Invoked to sure that share is exported."""
248 257
         raise NotImplementedError()

+ 23
- 0
manila/share/manager.py View File

@@ -35,6 +35,7 @@ from manila import manager
35 35
 from manila.openstack.common import log as logging
36 36
 from manila import quota
37 37
 import manila.share.configuration
38
+from manila.share import utils as share_utils
38 39
 from manila import utils
39 40
 
40 41
 LOG = logging.getLogger(__name__)
@@ -82,6 +83,27 @@ class ShareManager(manager.SchedulerDependentManager):
82 83
         self.driver = importutils.import_object(
83 84
             share_driver, self.db, configuration=self.configuration)
84 85
 
86
+    def _ensure_share_has_pool(self, ctxt, share):
87
+        pool = share_utils.extract_host(share['host'], 'pool')
88
+        if pool is None:
89
+            # No pool name encoded in host, so this is a legacy
90
+            # share created before pool is introduced, ask
91
+            # driver to provide pool info if it has such
92
+            # knowledge and update the DB.
93
+            try:
94
+                pool = self.driver.get_pool(share)
95
+            except Exception as err:
96
+                LOG.error(_LE("Failed to fetch pool name for share: "
97
+                              "%(share)s. Error: %(error)s."),
98
+                          {'share': share['id'], 'error': err})
99
+                return
100
+
101
+            if pool:
102
+                new_host = share_utils.append_host(share['host'], pool)
103
+                self.db.share_update(ctxt, share['id'], {'host': new_host})
104
+
105
+        return pool
106
+
85 107
     def init_host(self):
86 108
         """Initialization for a standalone service."""
87 109
 
@@ -93,6 +115,7 @@ class ShareManager(manager.SchedulerDependentManager):
93 115
         LOG.debug("Re-exporting %s shares", len(shares))
94 116
         for share in shares:
95 117
             if share['status'] == 'available':
118
+                self._ensure_share_has_pool(ctxt, share)
96 119
                 share_server = self._get_share_server(ctxt, share)
97 120
                 try:
98 121
                     self.driver.ensure_share(

+ 15
- 7
manila/share/rpcapi.py View File

@@ -21,6 +21,7 @@ from oslo_config import cfg
21 21
 from oslo_serialization import jsonutils
22 22
 
23 23
 from manila import rpc
24
+from manila.share import utils
24 25
 
25 26
 CONF = cfg.CONF
26 27
 
@@ -44,7 +45,8 @@ class ShareAPI(object):
44 45
     def create_share(self, ctxt, share, host,
45 46
                      request_spec, filter_properties,
46 47
                      snapshot_id=None):
47
-        cctxt = self.client.prepare(server=host, version='1.0')
48
+        new_host = utils.extract_host(host)
49
+        cctxt = self.client.prepare(server=new_host, version='1.0')
48 50
         request_spec_p = jsonutils.to_primitive(request_spec)
49 51
         cctxt.cast(
50 52
             ctxt,
@@ -56,15 +58,18 @@ class ShareAPI(object):
56 58
         )
57 59
 
58 60
     def delete_share(self, ctxt, share):
59
-        cctxt = self.client.prepare(server=share['host'], version='1.0')
61
+        host = utils.extract_host(share['host'])
62
+        cctxt = self.client.prepare(server=host, version='1.0')
60 63
         cctxt.cast(ctxt, 'delete_share', share_id=share['id'])
61 64
 
62 65
     def delete_share_server(self, ctxt, share_server):
63
-        cctxt = self.client.prepare(server=share_server['host'], version='1.0')
66
+        host = utils.extract_host(share_server['host'])
67
+        cctxt = self.client.prepare(server=host, version='1.0')
64 68
         cctxt.cast(ctxt, 'delete_share_server', share_server=share_server)
65 69
 
66 70
     def create_snapshot(self, ctxt, share, snapshot):
67
-        cctxt = self.client.prepare(server=share['host'])
71
+        host = utils.extract_host(share['host'])
72
+        cctxt = self.client.prepare(server=host)
68 73
         cctxt.cast(
69 74
             ctxt,
70 75
             'create_snapshot',
@@ -73,15 +78,18 @@ class ShareAPI(object):
73 78
         )
74 79
 
75 80
     def delete_snapshot(self, ctxt, snapshot, host):
76
-        cctxt = self.client.prepare(server=host)
81
+        new_host = utils.extract_host(host)
82
+        cctxt = self.client.prepare(server=new_host)
77 83
         cctxt.cast(ctxt, 'delete_snapshot', snapshot_id=snapshot['id'])
78 84
 
79 85
     def allow_access(self, ctxt, share, access):
80
-        cctxt = self.client.prepare(server=share['host'], version='1.0')
86
+        host = utils.extract_host(share['host'])
87
+        cctxt = self.client.prepare(server=host, version='1.0')
81 88
         cctxt.cast(ctxt, 'allow_access', access_id=access['id'])
82 89
 
83 90
     def deny_access(self, ctxt, share, access):
84
-        cctxt = self.client.prepare(server=share['host'], version='1.0')
91
+        host = utils.extract_host(share['host'])
92
+        cctxt = self.client.prepare(server=host, version='1.0')
85 93
         cctxt.cast(ctxt, 'deny_access', access_id=access['id'])
86 94
 
87 95
     def publish_service_capabilities(self, ctxt):

+ 75
- 0
manila/share/utils.py View File

@@ -0,0 +1,75 @@
1
+# Copyright (c) 2012 OpenStack Foundation
2
+# Copyright (c) 2015 Rushil Chugh
3
+# All Rights Reserved.
4
+#
5
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
6
+#    not use this file except in compliance with the License. You may obtain
7
+#    a copy of the License at
8
+#
9
+#         http://www.apache.org/licenses/LICENSE-2.0
10
+#
11
+#    Unless required by applicable law or agreed to in writing, software
12
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
13
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
14
+#    License for the specific language governing permissions and limitations
15
+#    under the License.
16
+
17
+
18
+"""Share-related Utilities and helpers."""
19
+
20
+DEFAULT_POOL_NAME = '_pool0'
21
+
22
+
23
+def extract_host(host, level='backend', use_default_pool_name=False):
24
+    """Extract Host, Backend or Pool information from host string.
25
+
26
+    :param host: String for host, which could include host@backend#pool info
27
+    :param level: Indicate which level of information should be extracted
28
+                  from host string. Level can be 'host', 'backend' or 'pool',
29
+                  default value is 'backend'
30
+    :param use_default_pool_name: This flag specifies what to do
31
+                              if level == 'pool' and there is no 'pool' info
32
+                              encoded in host string.  default_pool_name=True
33
+                              will return DEFAULT_POOL_NAME, otherwise it will
34
+                              return None. Default value of this parameter
35
+                              is False.
36
+    :return: expected level of information
37
+
38
+    For example:
39
+        host = 'HostA@BackendB#PoolC'
40
+        ret = extract_host(host, 'host')
41
+        # ret is 'HostA'
42
+        ret = extract_host(host, 'backend')
43
+        # ret is 'HostA@BackendB'
44
+        ret = extract_host(host, 'pool')
45
+        # ret is 'PoolC'
46
+
47
+        host = 'HostX@BackendY'
48
+        ret = extract_host(host, 'pool')
49
+        # ret is None
50
+        ret = extract_host(host, 'pool', True)
51
+        # ret is '_pool0'
52
+    """
53
+    if level == 'host':
54
+        # Make sure pool is not included
55
+        hst = host.split('#')[0]
56
+        return hst.split('@')[0]
57
+    elif level == 'backend':
58
+        return host.split('#')[0]
59
+    elif level == 'pool':
60
+        lst = host.split('#')
61
+        if len(lst) == 2:
62
+            return lst[1]
63
+        elif use_default_pool_name is True:
64
+            return DEFAULT_POOL_NAME
65
+        else:
66
+            return None
67
+
68
+
69
+def append_host(host, pool):
70
+    """Encode pool into host info."""
71
+    if not host or not pool:
72
+        return host
73
+
74
+    new_host = "#".join([host, pool])
75
+    return new_host

+ 13
- 0
manila/test.py View File

@@ -324,3 +324,16 @@ class TestCase(base_test.BaseTestCase):
324 324
             self.assertTrue(a is None)
325 325
         else:
326 326
             f(a, *args, **kwargs)
327
+
328
+    def _dict_from_object(self, obj, ignored_keys):
329
+        if ignored_keys is None:
330
+            ignored_keys = []
331
+        return dict([(k, v) for k, v in obj.iteritems()
332
+                     if k not in ignored_keys])
333
+
334
+    def _assertEqualListsOfObjects(self, objs1, objs2, ignored_keys=None):
335
+        obj_to_dict = lambda o: self._dict_from_object(o, ignored_keys)
336
+        sort_key = lambda d: [d[k] for k in sorted(d)]
337
+        conv_and_sort = lambda obj: sorted(map(obj_to_dict, obj), key=sort_key)
338
+
339
+        self.assertEqual(conv_and_sort(objs1), conv_and_sort(objs2))

+ 111
- 0
manila/tests/api/contrib/test_scheduler_stats.py View File

@@ -0,0 +1,111 @@
1
+# Copyright 2014 eBay Inc.
2
+# Copyright 2013 OpenStack Foundation
3
+# Copyright (c) 2015 Rushil Chugh
4
+# All Rights Reserved.
5
+#
6
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
7
+#    not use this file except in compliance with the License. You may obtain
8
+#    a copy of the License at
9
+#
10
+#         http://www.apache.org/licenses/LICENSE-2.0
11
+#
12
+#    Unless required by applicable law or agreed to in writing, software
13
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
15
+#    License for the specific language governing permissions and limitations
16
+#    under the License.
17
+
18
+import mock
19
+
20
+from manila.api.contrib import scheduler_stats
21
+from manila import context
22
+from manila import test
23
+from manila.tests.api import fakes
24
+
25
+
26
+def schedule_rpcapi_get_pools(self, context, filters=None):
27
+    all_pools = []
28
+    pool1 = dict(name='pool1',
29
+                 capabilities=dict(
30
+                     total_capacity=1024, free_capacity=100,
31
+                     share_backend_name='pool1', reserved_percentage=0,
32
+                     driver_version='1.0.0', storage_protocol='iSCSI',
33
+                     QoS_support='False', updated=None))
34
+    all_pools.append(pool1)
35
+    pool2 = dict(name='pool2',
36
+                 capabilities=dict(
37
+                     total_capacity=512, free_capacity=200,
38
+                     share_backend_name='pool2', reserved_percentage=0,
39
+                     driver_version='1.0.1', storage_protocol='iSER',
40
+                     QoS_support='True', updated=None))
41
+    all_pools.append(pool2)
42
+
43
+    return all_pools
44
+
45
+
46
+@mock.patch('manila.scheduler.rpcapi.SchedulerAPI.get_pools',
47
+            schedule_rpcapi_get_pools)
48
+class SchedulerStatsAPITest(test.TestCase):
49
+    def setUp(self):
50
+        super(SchedulerStatsAPITest, self).setUp()
51
+        self.flags(host='fake')
52
+        self.controller = scheduler_stats.SchedulerStatsController()
53
+        self.ctxt = context.RequestContext('admin', 'fake', True)
54
+
55
+    def test_get_pools_summery(self):
56
+        req = fakes.HTTPRequest.blank('/v2/fake/scheduler_stats')
57
+        req.environ['manila.context'] = self.ctxt
58
+        res = self.controller.get_pools(req)
59
+
60
+        self.assertEqual(2, len(res['pools']))
61
+
62
+        expected = {
63
+            'pools': [
64
+                {
65
+                    'name': 'pool1',
66
+                },
67
+                {
68
+                    'name': 'pool2',
69
+                }
70
+            ]
71
+        }
72
+
73
+        self.assertDictMatch(res, expected)
74
+
75
+    def test_get_pools_detail(self):
76
+        req = fakes.HTTPRequest.blank('/v2/fake/scheduler_stats?detail=True')
77
+        req.environ['manila.context'] = self.ctxt
78
+        res = self.controller.get_pools(req)
79
+
80
+        self.assertEqual(2, len(res['pools']))
81
+
82
+        expected = {
83
+            'pools': [
84
+                {
85
+                    'name': 'pool1',
86
+                    'capabilities': {
87
+                        'updated': None,
88
+                        'total_capacity': 1024,
89
+                        'free_capacity': 100,
90
+                        'share_backend_name': 'pool1',
91
+                        'reserved_percentage': 0,
92
+                        'driver_version': '1.0.0',
93
+                        'storage_protocol': 'iSCSI',
94
+                        'QoS_support': 'False', }
95
+                },
96
+                {
97
+                    'name': 'pool2',
98
+                    'capabilities': {
99
+                        'updated': None,
100
+                        'total_capacity': 512,
101
+                        'free_capacity': 200,
102
+                        'share_backend_name': 'pool2',
103
+                        'reserved_percentage': 0,
104
+                        'driver_version': '1.0.1',
105
+                        'storage_protocol': 'iSER',
106
+                        'QoS_support': 'True', }
107
+                }
108
+            ]
109
+        }
110
+
111
+        self.assertDictMatch(res, expected)

+ 0
- 0
manila/tests/db/sqlalchemy/__init__.py View File


+ 50
- 0
manila/tests/db/sqlalchemy/test_api.py View File

@@ -0,0 +1,50 @@
1
+# Copyright (c) 2015 Rushil Chugh
2
+# All Rights Reserved.
3
+#
4
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
5
+#    not use this file except in compliance with the License. You may obtain
6
+#    a copy of the License at
7
+#
8
+#         http://www.apache.org/licenses/LICENSE-2.0
9
+#
10
+#    Unless required by applicable law or agreed to in writing, software
11
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13
+#    License for the specific language governing permissions and limitations
14
+#    under the License.
15
+
16
+"""Testing of SQLAlchemy backend."""
17
+
18
+from manila import context
19
+from manila.db.sqlalchemy import api
20
+from manila import test
21
+
22
+
23
+class SQLAlchemyAPIShareTestCase(test.TestCase):
24
+
25
+    def setUp(self):
26
+        """Run before each test."""
27
+        super(SQLAlchemyAPIShareTestCase, self).setUp()
28
+        self.ctxt = context.get_admin_context()
29
+
30
+    def test_share_filter_by_host_with_pools(self):
31
+        shares = [[api.share_create(self.ctxt, {'host': value})
32
+                   for value in ('foo', 'foo#pool0')]]
33
+
34
+        api.share_create(self.ctxt, {'host': 'foobar'})
35
+        self._assertEqualListsOfObjects(shares[0],
36
+                                        api.share_get_all_by_host(
37
+                                            self.ctxt, 'foo'),
38
+                                        ignored_keys=['volume_type',
39
+                                                      'volume_type_id'])
40
+
41
+    def test_share_filter_all_by_host_with_pools_multiple_hosts(self):
42
+        shares = [[api.share_create(self.ctxt, {'host': value})
43
+                   for value in ('foo', 'foo#pool0', 'foo', 'foo#pool1')]]
44
+
45
+        api.share_create(self.ctxt, {'host': 'foobar'})
46
+        self._assertEqualListsOfObjects(shares[0],
47
+                                        api.share_get_all_by_host(
48
+                                            self.ctxt, 'foo'),
49
+                                        ignored_keys=['volume_type',
50
+                                                      'volume_type_id'])

+ 2
- 1
manila/tests/policy.json View File

@@ -40,5 +40,6 @@
40 40
      "security_service:index": [],
41 41
      "security_service:get_all_security_services": [["rule:admin_api"]],
42 42
 
43
-    "limits_extension:used_limits": []
43
+    "limits_extension:used_limits": [],
44
+    "scheduler_extension:scheduler_stats:get_pools" : "rule:admin_api"
44 45
 }

+ 6
- 3
manila/tests/scheduler/fakes.py View File

@@ -65,6 +65,10 @@ class FakeHostManager(host_manager.HostManager):
65 65
                       'free_capacity_gb': 200,
66 66
                       'reserved_percentage': 5,
67 67
                       'timestamp': None},
68
+            'host5': {'total_capacity_gb': 2048,
69
+                      'free_capacity_gb': 500,
70
+                      'reserved_percentage': 5,
71
+                      'timestamp': None},
68 72
         }
69 73
 
70 74
 
@@ -85,9 +89,8 @@ def mock_host_manager_db_calls(mock_obj, disabled=None):
85 89
              availability_zone='zone2', updated_at=timeutils.utcnow()),
86 90
         dict(id=4, host='host4', topic='share', disabled=False,
87 91
              availability_zone='zone3', updated_at=timeutils.utcnow()),
88
-        # service on host5 is disabled
89
-        dict(id=5, host='host5', topic='share', disabled=True,
90
-             availability_zone='zone4', updated_at=timeutils.utcnow()),
92
+        dict(id=5, host='host5', topic='share', disabled=False,
93
+             availability_zone='zone3', updated_at=timeutils.utcnow()),
91 94
     ]
92 95
     if disabled is None:
93 96
         mock_obj.return_value = services

+ 7
- 3
manila/tests/scheduler/test_capacity_weigher.py View File

@@ -22,6 +22,7 @@ from oslo_config import cfg
22 22
 from manila import context
23 23
 from manila.openstack.common.scheduler import weights
24 24
 from manila.scheduler.weights import capacity
25
+from manila.share import utils
25 26
 from manila import test
26 27
 from manila.tests.scheduler import fakes
27 28
 
@@ -64,7 +65,8 @@ class CapacityWeigherTestCase(test.TestCase):
64 65
         # so, host1 should win:
65 66
         weighed_host = self._get_weighed_host(hostinfo_list)
66 67
         self.assertEqual(weighed_host.weight, 1.0)
67
-        self.assertEqual(weighed_host.obj.host, 'host1')
68
+        self.assertEqual(
69
+            'host1', utils.extract_host(weighed_host.obj.host))
68 70
 
69 71
     def test_capacity_weight_multiplier1(self):
70 72
         self.flags(capacity_weight_multiplier=-1.0)
@@ -78,7 +80,8 @@ class CapacityWeigherTestCase(test.TestCase):
78 80
         # so, host4 should win:
79 81
         weighed_host = self._get_weighed_host(hostinfo_list)
80 82
         self.assertEqual(weighed_host.weight, 0.0)
81
-        self.assertEqual(weighed_host.obj.host, 'host4')
83
+        self.assertEqual(
84
+            'host4', utils.extract_host(weighed_host.obj.host))
82 85
 
83 86
     def test_capacity_weight_multiplier2(self):
84 87
         self.flags(capacity_weight_multiplier=2.0)
@@ -92,4 +95,5 @@ class CapacityWeigherTestCase(test.TestCase):
92 95
         # so, host1 should win:
93 96
         weighed_host = self._get_weighed_host(hostinfo_list)
94 97
         self.assertEqual(weighed_host.weight, 2.0)
95
-        self.assertEqual(weighed_host.obj.host, 'host1')
98
+        self.assertEqual(
99
+            'host1', utils.extract_host(weighed_host.obj.host))

+ 11
- 0
manila/tests/scheduler/test_host_filters.py View File

@@ -57,6 +57,17 @@ class HostFiltersTestCase(test.TestCase):
57 57
                                     'service': service})
58 58
         self.assertTrue(filt_cls.host_passes(host, filter_properties))
59 59
 
60
+    def test_capacity_filter_current_host_passes(self):
61
+        self._stub_service_is_up(True)
62
+        filt_cls = self.class_map['CapacityFilter']()
63
+        filter_properties = {'size': 100, 'share_exists_on': 'host1#pool1'}
64
+        service = {'disabled': False}
65
+        host = fakes.FakeHostState('host1#pools1',
66
+                                   {'free_capacity_gb': 200,
67
+                                    'updated_at': None,
68
+                                    'service': service})
69
+        self.assertTrue(filt_cls.host_passes(host, filter_properties))
70
+
60 71
     def test_capacity_filter_fails(self):
61 72
         self._stub_service_is_up(True)
62 73
         filt_cls = self.class_map['CapacityFilter']()

+ 267
- 60
manila/tests/scheduler/test_host_manager.py View File

@@ -1,4 +1,5 @@
1 1
 # Copyright (c) 2011 OpenStack, LLC
2
+# Copyright (c) 2015 Rushil Chugh
2 3
 # All Rights Reserved.
3 4
 #
4 5
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -15,8 +16,6 @@
15 16
 """
16 17
 Tests For HostManager
17 18
 """
18
-import datetime
19
-
20 19
 import mock
21 20
 from oslo_config import cfg
22 21
 from oslo_utils import timeutils
@@ -29,6 +28,7 @@ from manila.scheduler import host_manager
29 28
 from manila import test
30 29
 from manila.tests.scheduler import fakes
31 30
 
31
+
32 32
 CONF = cfg.CONF
33 33
 
34 34
 
@@ -63,7 +63,7 @@ class HostManagerTestCase(test.TestCase):
63 63
         self.host_manager.filter_classes = [FakeFilterClass1,
64 64
                                             FakeFilterClass2]
65 65
 
66
-        # Test 'volume' returns 1 correct function
66
+        # Test 'share' returns 1 correct function
67 67
         filter_classes = self.host_manager._choose_host_filters(None)
68 68
         self.assertEqual(1, len(filter_classes))
69 69
         self.assertEqual('FakeFilterClass2', filter_classes[0].__name__)
@@ -140,7 +140,6 @@ class HostManagerTestCase(test.TestCase):
140 140
         context = 'fake_context'
141 141
         topic = CONF.share_topic
142 142
         ret_services = fakes.SHARE_SERVICES
143
-
144 143
         with mock.patch.object(db, 'service_get_all_by_topic',
145 144
                                mock.Mock(return_value=ret_services)):
146 145
             # Disabled service
@@ -149,133 +148,341 @@ class HostManagerTestCase(test.TestCase):
149 148
 
150 149
             self.assertEqual(4, len(host_state_map))
151 150
             # Check that service is up
152
-            for i in moves.range(4):
151
+            for i in xrange(4):
153 152
                 share_node = fakes.SHARE_SERVICES[i]
154 153
                 host = share_node['host']
155 154
                 self.assertEqual(share_node, host_state_map[host].service)
156 155
             db.service_get_all_by_topic.assert_called_once_with(context, topic)
157 156
 
158
-    def test_get_all_host_states_share_after_host_status_change(self):
157
+    @mock.patch('manila.db.service_get_all_by_topic')
158
+    @mock.patch('manila.utils.service_is_up')
159
+    def test_get_pools(self, _mock_service_is_up,
160
+                       _mock_service_get_all_by_topic):
159 161
         context = 'fake_context'
160
-        ret_services = fakes.SHARE_SERVICES
161 162
 
162
-        with mock.patch.object(db, 'service_get_all_by_topic',
163
-                               mock.Mock(return_value=ret_services)):
164
-
165
-            self.host_manager.get_all_host_states_share(context)
166
-            host_state_map = self.host_manager.host_state_map
163
+        services = [
164
+            dict(id=1, host='host1', topic='share', disabled=False,
165
+                 availability_zone='zone1', updated_at=timeutils.utcnow()),
166
+            dict(id=2, host='host2@back1', topic='share', disabled=False,
167
+                 availability_zone='zone1', updated_at=timeutils.utcnow()),
168
+            dict(id=3, host='host2@back2', topic='share', disabled=False,
169
+                 availability_zone='zone2', updated_at=timeutils.utcnow()),
170
+        ]
171
+
172
+        mocked_service_states = {
173
+            'host1': dict(share_backend_name='AAA',
174
+                          total_capacity_gb=512, free_capacity_gb=200,
175
+                          timestamp=None, reserved_percentage=0),
176
+            'host2@back1': dict(share_backend_name='BBB',
177
+                                total_capacity_gb=256, free_capacity_gb=100,
178
+                                timestamp=None, reserved_percentage=0),
179
+            'host2@back2': dict(share_backend_name='CCC',
180
+                                total_capacity_gb=10000, free_capacity_gb=700,
181
+                                timestamp=None, reserved_percentage=0),
182
+        }
167 183
 
168
-            delta_time = datetime.timedelta(0, CONF.service_down_time + 10)
169
-            # disable host4
170
-            ret_services[3]['disabled'] = True
171
-            # down host3
172
-            ret_services[2]['updated_at'] -= delta_time
173
-            # disabled and down host2
174
-            ret_services[1]['disabled'] = True
175
-            ret_services[1]['updated_at'] -= delta_time
184
+        _mock_service_get_all_by_topic.return_value = services
185
+        _mock_service_is_up.return_value = True
186
+        _mock_warning = mock.Mock()
187
+        host_manager.LOG.warn = _mock_warning
176 188
 
189
+        with mock.patch.dict(self.host_manager.service_states,
190
+                             mocked_service_states):
191
+            # Call get_all_host_states to populate host_state_map
177 192
             self.host_manager.get_all_host_states_share(context)
178
-            host_state_map = self.host_manager.host_state_map
179 193
 
180
-            # only 1 host is up and active.
181
-            self.assertEqual(1, len(host_state_map))
182
-            # The up and active host is host1
183
-            share_node = fakes.SHARE_SERVICES[0]
184
-            host = share_node['host']
185
-            self.assertEqual(share_node, host_state_map[host].service)
194
+            res = self.host_manager.get_pools(context)
195
+
196
+            # Check if get_pools returns all 3 pools
197
+            self.assertEqual(3, len(res))
198
+
199
+            expected = [
200
+                {
201
+                    'name': 'host1#AAA',
202
+                    'capabilities': {
203
+                        'timestamp': None,
204
+                        'share_backend_name': 'AAA',
205
+                        'free_capacity_gb': 200,
206
+                        'driver_version': None,
207
+                        'total_capacity_gb': 512,
208
+                        'reserved_percentage': 0,
209
+                        'vendor_name': None,
210
+                        'storage_protocol': None},
211
+                },
212
+                {
213
+                    'name': 'host2@back1#BBB',
214
+                    'capabilities': {
215
+                        'timestamp': None,
216
+                        'share_backend_name': 'BBB',
217
+                        'free_capacity_gb': 100,
218
+                        'driver_version': None,
219
+                        'total_capacity_gb': 256,
220
+                        'reserved_percentage': 0,
221
+                        'vendor_name': None,
222
+                        'storage_protocol': None},
223
+                },
224
+                {
225
+                    'name': 'host2@back2#CCC',
226
+                    'capabilities': {
227
+                        'timestamp': None,
228
+                        'share_backend_name': 'CCC',
229
+                        'free_capacity_gb': 700,
230
+                        'driver_version': None,
231
+                        'total_capacity_gb': 10000,
232
+                        'reserved_percentage': 0,
233
+                        'vendor_name': None,
234
+                        'storage_protocol': None},
235
+                }
236
+            ]
237
+            self.assertEqual(len(expected), len(res))
238
+            self.assertEqual(sorted(expected), sorted(res))
186 239
 
187 240
 
188 241
 class HostStateTestCase(test.TestCase):
189 242
     """Test case for HostState class."""
190 243
 
191
-    def test_update_from_share_capability(self):
192
-        fake_host = host_manager.HostState('host1')
193
-        self.assertEqual(None, fake_host.free_capacity_gb)
194
-
195
-        share_capability = {'total_capacity_gb': 1024,
196
-                            'free_capacity_gb': 512,
244
+    def test_update_from_share_capability_nopool(self):
245
+        share_capability = {'total_capacity_gb': 0,
246
+                            'free_capacity_gb': 100,
197 247
                             'reserved_percentage': 0,
198 248
                             'timestamp': None}
249
+        fake_host = host_manager.HostState('host1', share_capability)
250
+        self.assertIsNone(fake_host.free_capacity_gb)
199 251
 
200 252
         fake_host.update_from_share_capability(share_capability)
201
-        self.assertEqual(512, fake_host.free_capacity_gb)
253
+        # Backend level stats remain uninitialized
254
+        self.assertEqual(0, fake_host.total_capacity_gb)
255
+        self.assertIsNone(fake_host.free_capacity_gb)
256
+        # Pool stats has been updated
257
+        self.assertEqual(0, fake_host.pools['_pool0'].total_capacity_gb)
258
+        self.assertEqual(100, fake_host.pools['_pool0'].free_capacity_gb)
259
+
260
+        # Test update for existing host state
261
+        share_capability.update(dict(total_capacity_gb=1000))
262
+        fake_host.update_from_share_capability(share_capability)
263
+        self.assertEqual(1000, fake_host.pools['_pool0'].total_capacity_gb)
202 264
 
203
-    def test_update_from_share_infinite_capability(self):
204
-        fake_host = host_manager.HostState('host1')
205
-        self.assertEqual(None, fake_host.free_capacity_gb)
265
+        # Test update for existing host state with different backend name
266
+        share_capability.update(dict(share_backend_name='magic'))
267
+        fake_host.update_from_share_capability(share_capability)
268
+        self.assertEqual(1000, fake_host.pools['magic'].total_capacity_gb)
269
+        self.assertEqual(100, fake_host.pools['magic'].free_capacity_gb)
270
+        # 'pool0' becomes nonactive pool, and is deleted
271
+        self.assertRaises(KeyError, lambda: fake_host.pools['pool0'])
272
+
273
+    def test_update_from_share_capability_with_pools(self):
274
+        fake_host = host_manager.HostState('host1#pool1')
275
+        self.assertIsNone(fake_host.free_capacity_gb)
276
+        capability = {
277
+            'share_backend_name': 'Backend1',
278
+            'vendor_name': 'OpenStack',
279
+            'driver_version': '1.1',
280
+            'storage_protocol': 'NFS_CIFS',
281
+            'pools': [
282
+                {'pool_name': 'pool1',
283
+                 'total_capacity_gb': 500,
284
+                 'free_capacity_gb': 230,
285
+                 'allocated_capacity_gb': 270,
286
+                 'QoS_support': 'False',
287
+                 'reserved_percentage': 0,
288
+                 'dying_disks': 100,
289
+                 'super_hero_1': 'spider-man',
290
+                 'super_hero_2': 'flash',
291
+                 'super_hero_3': 'neoncat',
292
+                 },
293
+                {'pool_name': 'pool2',
294
+                 'total_capacity_gb': 1024,
295
+                 'free_capacity_gb': 1024,
296
+                 'allocated_capacity_gb': 0,
297
+                 'QoS_support': 'False',
298
+                 'reserved_percentage': 0,
299
+                 'dying_disks': 200,
300
+                 'super_hero_1': 'superman',
301
+                 'super_hero_2': ' ',
302
+                 'super_hero_2': 'Hulk',
303
+                 }
304
+            ],
305
+            'timestamp': None,
306
+        }
307
+
308
+        fake_host.update_from_share_capability(capability)
309
+
310
+        self.assertEqual('Backend1', fake_host.share_backend_name)
311
+        self.assertEqual('NFS_CIFS', fake_host.storage_protocol)
312
+        self.assertEqual('OpenStack', fake_host.vendor_name)
313
+        self.assertEqual('1.1', fake_host.driver_version)
314
+
315
+        # Backend level stats remain uninitialized
316
+        self.assertEqual(0, fake_host.total_capacity_gb)
317
+        self.assertIsNone(fake_host.free_capacity_gb)
318
+        # Pool stats has been updated
319
+        self.assertEqual(2, len(fake_host.pools))
320
+
321
+        self.assertEqual(500, fake_host.pools['pool1'].total_capacity_gb)
322
+        self.assertEqual(230, fake_host.pools['pool1'].free_capacity_gb)
323
+        self.assertEqual(1024, fake_host.pools['pool2'].total_capacity_gb)
324
+        self.assertEqual(1024, fake_host.pools['pool2'].free_capacity_gb)
325
+
326
+        capability = {
327
+            'share_backend_name': 'Backend1',
328
+            'vendor_name': 'OpenStack',
329
+            'driver_version': '1.0',
330
+            'storage_protocol': 'NFS_CIFS',
331
+            'pools': [
332
+                {'pool_name': 'pool3',
333
+                 'total_capacity_gb': 10000,
334
+                 'free_capacity_gb': 10000,
335
+                 'allocated_capacity_gb': 0,
336
+                 'QoS_support': 'False',
337
+                 'reserved_percentage': 0,
338
+                 },
339
+            ],
340
+            'timestamp': None,
341
+        }
342
+
343
+        # test update HostState Record
344
+        fake_host.update_from_share_capability(capability)
206 345
 
346
+        self.assertEqual('1.0', fake_host.driver_version)
347
+
348
+        # Non-active pool stats has been removed
349
+        self.assertEqual(1, len(fake_host.pools))
350
+
351
+        self.assertRaises(KeyError, lambda: fake_host.pools['pool1'])
352
+        self.assertRaises(KeyError, lambda: fake_host.pools['pool2'])
353
+
354
+        self.assertEqual(10000, fake_host.pools['pool3'].total_capacity_gb)
355
+        self.assertEqual(10000, fake_host.pools['pool3'].free_capacity_gb)
356
+
357
+    def test_update_from_share_infinite_capability(self):
207 358
         share_capability = {'total_capacity_gb': 'infinite',
208 359
                             'free_capacity_gb': 'infinite',
209 360
                             'reserved_percentage': 0,
210 361
                             'timestamp': None}
362
+        fake_host = host_manager.HostState('host1#_pool0')
363
+        self.assertIsNone(fake_host.free_capacity_gb)
211 364
 
212 365
         fake_host.update_from_share_capability(share_capability)
213
-        self.assertEqual('infinite', fake_host.total_capacity_gb)
214
-        self.assertEqual('infinite', fake_host.free_capacity_gb)
366
+        # Backend level stats remain uninitialized
367
+        self.assertEqual(fake_host.total_capacity_gb, 0)
368
+        self.assertIsNone(fake_host.free_capacity_gb)
369
+        # Pool stats has been updated
370
+        self.assertEqual(fake_host.pools['_pool0'].total_capacity_gb,
371
+                         'infinite')
372
+        self.assertEqual(fake_host.pools['_pool0'].free_capacity_gb,
373
+                         'infinite')
215 374
 
216 375
     def test_update_from_share_unknown_capability(self):
217
-        fake_host = host_manager.HostState('host1')
218
-        self.assertEqual(None, fake_host.free_capacity_gb)
219
-
220 376
         share_capability = {
221 377
             'total_capacity_gb': 'infinite',
222 378
             'free_capacity_gb': 'unknown',
223 379
             'reserved_percentage': 0,
224 380
             'timestamp': None
225 381
         }
382
+        fake_host = host_manager.HostState('host1#_pool0')
383
+        self.assertIsNone(fake_host.free_capacity_gb)
226 384
 
227 385
         fake_host.update_from_share_capability(share_capability)
228
-        self.assertEqual('infinite', fake_host.total_capacity_gb)
229
-        self.assertEqual('unknown', fake_host.free_capacity_gb)
386
+        # Backend level stats remain uninitialized
387
+        self.assertEqual(fake_host.total_capacity_gb, 0)
388
+        self.assertIsNone(fake_host.free_capacity_gb)
389
+        # Pool stats has been updated
390
+        self.assertEqual(fake_host.pools['_pool0'].total_capacity_gb,
391
+                         'infinite')
392
+        self.assertEqual(fake_host.pools['_pool0'].free_capacity_gb,
393
+                         'unknown')
230 394
 
231 395
     def test_consume_from_share_capability(self):
232
-        fake_host = host_manager.HostState('host1')
233 396
         share_size = 10
234 397
         free_capacity = 100
235 398
         fake_share = {'id': 'foo', 'size': share_size}
236
-
237 399
         share_capability = {
238 400
             'total_capacity_gb': free_capacity * 2,
239 401
             'free_capacity_gb': free_capacity,
240 402
             'reserved_percentage': 0,
241 403
             'timestamp': None
242 404
         }
405
+        fake_host = host_manager.PoolState('host1', share_capability, '_pool0')
243 406
 
244 407
         fake_host.update_from_share_capability(share_capability)
245 408
         fake_host.consume_from_share(fake_share)
246
-        self.assertEqual(free_capacity - share_size,
247
-                         fake_host.free_capacity_gb)
409
+        self.assertEqual(fake_host.free_capacity_gb,
410
+                         free_capacity - share_size)
248 411
 
249 412
     def test_consume_from_share_infinite_capability(self):
250
-        fake_host = host_manager.HostState('host1')
251
-        share_size = 1000
252
-        fake_share = {'id': 'foo', 'size': share_size}
253
-
254 413
         share_capability = {
255 414
             'total_capacity_gb': 'infinite',
256 415
             'free_capacity_gb': 'infinite',
257 416
             'reserved_percentage': 0,
258 417
             'timestamp': None
259 418
         }
419
+        fake_host = host_manager.PoolState('host1', share_capability, '_pool0')
420
+        share_size = 1000
421
+        fake_share = {'id': 'foo', 'size': share_size}
260 422
 
261 423
         fake_host.update_from_share_capability(share_capability)
262 424
         fake_host.consume_from_share(fake_share)
263
-        self.assertEqual('infinite', fake_host.total_capacity_gb)
264
-        self.assertEqual('infinite', fake_host.free_capacity_gb)
425
+        self.assertEqual(fake_host.total_capacity_gb, 'infinite')
426
+        self.assertEqual(fake_host.free_capacity_gb, 'infinite')
265 427
 
266 428
     def test_consume_from_share_unknown_capability(self):
267
-        fake_host = host_manager.HostState('host1')
268
-        share_size = 1000
269
-        fake_share = {'id': 'foo', 'size': share_size}
270
-
271 429
         share_capability = {
272 430
             'total_capacity_gb': 'infinite',
273 431
             'free_capacity_gb': 'unknown',
274 432
             'reserved_percentage': 0,
275 433
             'timestamp': None
276 434
         }
435
+        fake_host = host_manager.PoolState('host1', share_capability, '_pool0')
436
+        share_size = 1000
437
+        fake_share = {'id': 'foo', 'size': share_size}
277 438
 
278 439
         fake_host.update_from_share_capability(share_capability)
279 440
         fake_host.consume_from_share(fake_share)
280
-        self.assertEqual('infinite', fake_host.total_capacity_gb)
281
-        self.assertEqual('unknown', fake_host.free_capacity_gb)
441
+        self.assertEqual(fake_host.total_capacity_gb, 'infinite')
442
+        self.assertEqual(fake_host.free_capacity_gb, 'unknown')
443
+
444
+    def test_repr(self):
445
+
446
+        capability = {
447
+            'share_backend_name': 'Backend1',
448
+            'vendor_name': 'OpenStack',
449
+            'driver_version': '1.0',
450
+            'storage_protocol': 'NFS_CIFS',
451
+            'total_capacity_gb': 20000,
452
+            'free_capacity_gb': 15000,
453
+            'allocated_capacity_gb': 5000,
454
+            'timestamp': None,
455
+            'reserved_percentage': 0,
456
+        }
457
+        fake_host = host_manager.HostState('host1')
458
+        fake_host.update_from_share_capability(capability)
459
+
460
+        result = fake_host.__repr__()
461
+        expected = "host: 'host1', free_capacity_gb: None, " \
462
+                   "pools: {'Backend1': host: 'host1#Backend1', " \
463
+                   "free_capacity_gb: 15000, pools: None}"
464
+        self.assertEqual(expected, result)
465
+
466
+
467
+class PoolStateTestCase(test.TestCase):
468
+    """Test case for HostState class."""
469
+
470
+    def test_update_from_share_capability(self):
471
+        share_capability = {
472
+            'total_capacity_gb': 1024,
473
+            'free_capacity_gb': 512,
474
+            'reserved_percentage': 0,
475
+            'timestamp': None,
476
+            'cap1': 'val1',
477
+            'cap2': 'val2'
478
+        }
479
+        fake_pool = host_manager.PoolState('host1', None, 'pool0')
480
+        self.assertIsNone(fake_pool.free_capacity_gb)
481
+
482
+        fake_pool.update_from_share_capability(share_capability)
483
+        self.assertEqual(fake_pool.host, 'host1#pool0')
484
+        self.assertEqual(fake_pool.pool_name, 'pool0')
485
+        self.assertEqual(fake_pool.total_capacity_gb, 1024)
486
+        self.assertEqual(fake_pool.free_capacity_gb, 512)
487
+
488
+        self.assertDictMatch(fake_pool.capabilities, share_capability)

+ 7
- 1
manila/tests/scheduler/test_rpcapi.py View File

@@ -43,7 +43,7 @@ class SchedulerRpcAPITestCase(test.TestCase):
43 43
 
44 44
         target = {
45 45
             "fanout": fanout,
46
-            "version": kwargs.pop('version', rpcapi.RPC_API_VERSION),
46
+            "version": kwargs.pop('version', '1.0'),
47 47
         }
48 48
         expected_msg = copy.deepcopy(kwargs)
49 49
 
@@ -89,3 +89,9 @@ class SchedulerRpcAPITestCase(test.TestCase):
89 89
                                  request_spec='fake_request_spec',
90 90
                                  filter_properties='filter_properties',
91 91
                                  version='1.0')
92
+
93
+    def test_get_pools(self):
94
+        self._test_scheduler_api('get_pools',
95
+                                 rpc_method='call',
96
+                                 filters=None,
97
+                                 version='1.1')

+ 10
- 0
manila/tests/scheduler/test_scheduler.py View File

@@ -97,6 +97,16 @@ class SchedulerManagerTestCase(test.TestCase):
97 97
             self.manager.driver.schedule_create_share.assert_called_once_with(
98 98
                 self.context, request_spec, {})
99 99
 
100
+    def test_get_pools(self):
101
+        """Ensure get_pools exists and calls driver.get_pools."""
102
+        mock_get_pools = self.mock_object(self.manager.driver, 'get_pools',
103
+                                          mock.Mock(return_value='fake_pools'))
104
+
105
+        result = self.manager.get_pools(self.context, filters='fake_filters')
106
+
107
+        mock_get_pools.assert_called_once_with(self.context, 'fake_filters')
108
+        self.assertEqual('fake_pools', result)
109
+
100 110
 
101 111
 class SchedulerTestCase(test.TestCase):
102 112
     """Test case for base scheduler driver class."""

+ 41
- 0
manila/tests/share/test_manager.py View File

@@ -168,6 +168,7 @@ class ShareManagerTestCase(test.TestCase):
168 168
                          'share_get_all_by_host',
169 169
                          mock.Mock(return_value=shares))
170 170
         self.mock_object(self.share_manager.driver, 'ensure_share')
171
+        self.mock_object(self.share_manager, '_ensure_share_has_pool')
171 172
         self.mock_object(self.share_manager, '_get_share_server',
172 173
                          mock.Mock(return_value=share_server))
173 174
         self.mock_object(self.share_manager, 'publish_service_capabilities',
@@ -188,6 +189,9 @@ class ShareManagerTestCase(test.TestCase):
188 189
             utils.IsAMatcher(context.RequestContext))
189 190
         self.share_manager.driver.check_for_setup_error.\
190 191
             assert_called_once_with()
192
+        self.share_manager._ensure_share_has_pool.\
193
+            assert_called_once_with(utils.IsAMatcher(context.RequestContext),
194
+                                    shares[0])
191 195
         self.share_manager._get_share_server.assert_called_once_with(
192 196
             utils.IsAMatcher(context.RequestContext), shares[0])
193 197
         self.share_manager.driver.ensure_share.assert_called_once_with(
@@ -218,6 +222,7 @@ class ShareManagerTestCase(test.TestCase):
218 222
                          mock.Mock(return_value=shares))
219 223
         self.mock_object(self.share_manager.driver, 'ensure_share',
220 224
                          mock.Mock(side_effect=raise_exception))
225
+        self.mock_object(self.share_manager, '_ensure_share_has_pool')
221 226
         self.mock_object(self.share_manager, '_get_share_server',
222 227
                          mock.Mock(return_value=share_server))
223 228
         self.mock_object(self.share_manager, 'publish_service_capabilities')
@@ -233,6 +238,10 @@ class ShareManagerTestCase(test.TestCase):
233 238
         self.share_manager.driver.do_setup.assert_called_once_with(
234 239
             utils.IsAMatcher(context.RequestContext))
235 240
         self.share_manager.driver.check_for_setup_error.assert_called_with()
241
+        self.share_manager._ensure_share_has_pool.assert_has_calls([
242
+            mock.call(utils.IsAMatcher(context.RequestContext), shares[0]),
243
+            mock.call(utils.IsAMatcher(context.RequestContext), shares[2]),
244
+        ])
236 245
         self.share_manager._get_share_server.assert_has_calls([
237 246
             mock.call(utils.IsAMatcher(context.RequestContext), shares[0]),
238 247
             mock.call(utils.IsAMatcher(context.RequestContext), shares[2]),
@@ -269,6 +278,7 @@ class ShareManagerTestCase(test.TestCase):
269 278
                          'share_get_all_by_host',
270 279
                          mock.Mock(return_value=shares))
271 280
         self.mock_object(self.share_manager.driver, 'ensure_share')
281
+        self.mock_object(self.share_manager, '_ensure_share_has_pool')
272 282
         self.mock_object(self.share_manager, '_get_share_server',
273 283
                          mock.Mock(return_value=share_server))
274 284
         self.mock_object(self.share_manager, 'publish_service_capabilities')
@@ -289,6 +299,10 @@ class ShareManagerTestCase(test.TestCase):
289 299
         self.share_manager.driver.do_setup.assert_called_once_with(
290 300
             utils.IsAMatcher(context.RequestContext))
291 301
         self.share_manager.driver.check_for_setup_error.assert_called_with()
302
+        self.share_manager._ensure_share_has_pool.assert_has_calls([
303
+            mock.call(utils.IsAMatcher(context.RequestContext), shares[0]),
304
+            mock.call(utils.IsAMatcher(context.RequestContext), shares[2]),
305
+        ])
292 306
         self.share_manager._get_share_server.assert_has_calls([
293 307
             mock.call(utils.IsAMatcher(context.RequestContext), shares[0]),
294 308
             mock.call(utils.IsAMatcher(context.RequestContext), shares[2]),
@@ -960,3 +974,30 @@ class ShareManagerTestCase(test.TestCase):
960 974
 
961 975
     def test_setup_server_exception_in_driver(self):
962 976
         self.setup_server_raise_exception(detail_data_proper=True)
977
+
978
+    def test_ensure_share_has_pool_with_only_host(self):
979
+        fake_share = {'status': 'available', 'host': 'host1', 'id': 1}
980
+        host = self.share_manager._ensure_share_has_pool(context.
981
+                                                         get_admin_context(),
982
+                                                         fake_share)
983
+        self.assertIsNone(host)
984
+
985
+    def test_ensure_share_has_pool_with_full_pool_name(self):
986
+        fake_share = {'host': 'host1#pool0', 'id': 1,
987
+                      'status': 'available'}
988
+        fake_share_expected_value = 'pool0'
989
+        host = self.share_manager._ensure_share_has_pool(context.
990
+                                                         get_admin_context(),
991
+                                                         fake_share)
992
+        self.assertEqual(fake_share_expected_value, host)
993
+
994
+    def test_ensure_share_has_pool_unable_to_fetch_share(self):
995
+        fake_share = {'host': 'host@backend', 'id': 1,
996
+                      'status': 'available'}
997
+        with mock.patch.object(self.share_manager.driver, 'get_pool',
998
+                               side_effect=Exception):
999
+            with mock.patch.object(manager, 'LOG') as mock_LOG:
1000
+                self.share_manager._ensure_share_has_pool(context.
1001
+                                                          get_admin_context(),
1002
+                                                          fake_share)
1003
+                self.assertEqual(1, mock_LOG.error.call_count)

+ 130
- 0
manila/tests/share/test_share_utils.py View File

@@ -0,0 +1,130 @@
1
+# Copyright 2011 OpenStack Foundation
2
+# Copyright (c) 2015 Rushil Chugh
3
+# All Rights Reserved.
4
+#
5
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
6
+#    not use this file except in compliance with the License. You may obtain
7
+#    a copy of the License at
8
+#
9
+#         http://www.apache.org/licenses/LICENSE-2.0
10
+#
11
+#    Unless required by applicable law or agreed to in writing, software
12
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
13
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
14
+#    License for the specific language governing permissions and limitations
15
+#    under the License.
16
+
17
+"""Tests For miscellaneous util methods used with share."""
18
+
19
+from manila.share import utils as share_utils
20
+from manila import test
21
+
22
+
23
+class ShareUtilsTestCase(test.TestCase):
24
+    def test_extract_host_without_pool(self):
25
+        host = 'Host@Backend'
26
+        self.assertEqual(
27
+            'Host@Backend', share_utils.extract_host(host))
28
+
29
+    def test_extract_host_only_return_host(self):
30
+        host = 'Host@Backend'
31
+        self.assertEqual(
32
+            'Host', share_utils.extract_host(host, 'host'))
33
+
34
+    def test_extract_host_only_return_pool(self):
35
+        host = 'Host@Backend'
36
+        self.assertEqual(
37
+            None, share_utils.extract_host(host, 'pool'))
38
+
39
+    def test_extract_host_only_return_backend(self):
40
+        host = 'Host@Backend'
41
+        self.assertEqual(
42
+            'Host@Backend', share_utils.extract_host(host, 'backend'))
43
+
44
+    def test_extract_host_missing_backend_and_pool(self):
45
+        host = 'Host'
46
+        # Default level is 'backend'
47
+        self.assertEqual(
48
+            'Host', share_utils.extract_host(host))
49
+
50
+    def test_extract_host_missing_backend(self):
51
+        host = 'Host#Pool'
52
+        self.assertEqual(
53
+            'Host', share_utils.extract_host(host))
54
+        self.assertEqual(
55
+            'Host', share_utils.extract_host(host, 'host'))
56
+
57
+    def test_extract_host_missing_backend_only_return_backend(self):
58
+        host = 'Host#Pool'
59
+        self.assertEqual(
60
+            'Host', share_utils.extract_host(host, 'backend'))
61
+
62
+    def test_extract_host_missing_backend_only_return_pool(self):
63
+        host = 'Host#Pool'
64
+        self.assertEqual(
65
+            'Pool', share_utils.extract_host(host, 'pool'))
66
+        self.assertEqual(
67
+            'Pool', share_utils.extract_host(host, 'pool', True))
68
+
69
+    def test_extract_host_missing_pool(self):
70
+        host = 'Host@Backend'
71
+        self.assertEqual(
72
+            None, share_utils.extract_host(host, 'pool'))
73
+
74
+    def test_extract_host_missing_pool_use_default_pool(self):
75
+        host = 'Host@Backend'
76
+        self.assertEqual(
77
+            '_pool0', share_utils.extract_host(host, 'pool', True))
78
+
79
+    def test_extract_host_with_default_pool(self):
80
+        host = 'Host'
81
+        # Default_pool_name doesn't work for level other than 'pool'
82
+        self.assertEqual(
83
+            'Host', share_utils.extract_host(host, 'host', True))
84
+        self.assertEqual(
85
+            'Host', share_utils.extract_host(host, 'host', False))
86
+        self.assertEqual(
87
+            'Host', share_utils.extract_host(host, 'backend', True))
88
+        self.assertEqual(
89
+            'Host', share_utils.extract_host(host, 'backend', False))
90
+
91
+    def test_extract_host_with_pool(self):
92
+        host = 'Host@Backend#Pool'
93
+        self.assertEqual(
94
+            'Host@Backend', share_utils.extract_host(host))
95
+        self.assertEqual(
96
+            'Host', share_utils.extract_host(host, 'host'))
97
+        self.assertEqual(
98
+            'Host@Backend', share_utils.extract_host(host, 'backend'),)
99
+        self.assertEqual(
100
+            'Pool', share_utils.extract_host(host, 'pool'))
101
+        self.assertEqual(
102
+            'Pool', share_utils.extract_host(host, 'pool', True))
103
+
104
+    def test_append_host_with_host_and_pool(self):
105
+        host = 'Host'
106
+        pool = 'Pool'
107
+        expected = 'Host#Pool'
108
+        self.assertEqual(expected,
109
+                         share_utils.append_host(host, pool))
110
+
111
+    def test_append_host_with_host(self):
112
+        host = 'Host'
113
+        pool = None
114
+        expected = 'Host'
115
+        self.assertEqual(expected,
116
+                         share_utils.append_host(host, pool))
117
+
118
+    def test_append_host_with_pool(self):
119
+        host = None
120
+        pool = 'pool'
121
+        expected = None
122
+        self.assertEqual(expected,
123
+                         share_utils.append_host(host, pool))
124
+
125
+    def test_append_host_with_no_values(self):
126
+        host = None
127
+        pool = None
128
+        expected = None
129
+        self.assertEqual(expected,
130
+                         share_utils.append_host(host, pool))

+ 2
- 0
manila/tests/test_quota.py View File

@@ -52,6 +52,7 @@ class QuotaIntegrationTestCase(test.TestCase):
52 52
         share['project_id'] = self.project_id
53 53
         share['size'] = size
54 54
         share['status'] = 'available'
55
+        share['host'] = 'fake_host'
55 56
         return db.share_create(self.context, share)
56 57
 
57 58
     def _create_snapshot(self, share):
@@ -60,6 +61,7 @@ class QuotaIntegrationTestCase(test.TestCase):
60 61
         snapshot['project_id'] = self.project_id
61 62
         snapshot['share_id'] = share['id']
62 63
         snapshot['share_size'] = share['size']
64
+        snapshot['host'] = share['host']
63 65
         snapshot['status'] = 'available'
64 66
         return db.share_snapshot_create(self.context, snapshot)
65 67
 

Loading…
Cancel
Save