Browse Source

Enable some off-by-default checks

Some of the available checks are disabled by default, like:
[H106] Don't put vim configuration in source files
[H203] Use assertIs(Not)None to check for None
[H904] Use ',' instead of '%': string interpolation should be
       delayed so that it is handled by the logging code, rather
       than being done at the point of the logging call.

Change-Id: Ie985fcf78997a86d41e40eacbb4a5ace8592a348
junboli 1 year ago
parent
commit
fb17422c86
47 changed files with 108 additions and 101 deletions
  1. 3
    3
      contrib/share_driver_hooks/zaqar_notification.py
  2. 3
    3
      manila/api/openstack/wsgi.py
  3. 1
    1
      manila/db/migrations/alembic/versions/3651e16d7c43_add_consistency_groups.py
  4. 1
    1
      manila/db/migrations/alembic/versions/e1949a93157a_add_share_group_types_table.py
  5. 3
    3
      manila/db/migrations/alembic/versions/e8ea58723178_remove_host_from_driver_private_data.py
  6. 3
    3
      manila/message/api.py
  7. 4
    4
      manila/scheduler/drivers/filter.py
  8. 3
    3
      manila/scheduler/filters/ignore_attempted_hosts.py
  9. 2
    2
      manila/scheduler/host_manager.py
  10. 1
    1
      manila/service.py
  11. 4
    4
      manila/share/api.py
  12. 4
    4
      manila/share/driver.py
  13. 4
    4
      manila/share/drivers/container/driver.py
  14. 1
    1
      manila/share/drivers/container/protocol_helper.py
  15. 1
    1
      manila/share/drivers/container/storage_helper.py
  16. 2
    2
      manila/share/drivers/generic.py
  17. 1
    1
      manila/share/drivers/glusterfs/common.py
  18. 4
    4
      manila/share/drivers/helpers.py
  19. 1
    1
      manila/share/drivers/hitachi/hnas/ssh.py
  20. 1
    1
      manila/share/drivers/huawei/v3/helper.py
  21. 2
    2
      manila/share/drivers/ibm/gpfs.py
  22. 1
    1
      manila/share/drivers/lvm.py
  23. 2
    2
      manila/share/drivers/netapp/common.py
  24. 4
    4
      manila/share/drivers/netapp/dataontap/client/client_cmode.py
  25. 3
    3
      manila/share/drivers/netapp/dataontap/cluster_mode/lib_base.py
  26. 1
    1
      manila/share/drivers/netapp/dataontap/cluster_mode/lib_multi_svm.py
  27. 1
    1
      manila/share/drivers/netapp/dataontap/cluster_mode/lib_single_svm.py
  28. 3
    3
      manila/share/drivers/netapp/dataontap/protocols/nfs_cmode.py
  29. 5
    5
      manila/share/drivers/netapp/utils.py
  30. 1
    1
      manila/share/drivers/quobyte/jsonrpc.py
  31. 4
    4
      manila/share/drivers/quobyte/quobyte.py
  32. 2
    2
      manila/share/drivers/service_instance.py
  33. 3
    3
      manila/share/drivers/windows/windows_smb_helper.py
  34. 1
    1
      manila/share/drivers/zfsonlinux/driver.py
  35. 2
    2
      manila/share/manager.py
  36. 1
    1
      manila/tests/fake_utils.py
  37. 2
    1
      manila/tests/message/test_api.py
  38. 8
    8
      manila/tests/share/drivers/dummy.py
  39. 2
    1
      manila/tests/share/drivers/quobyte/test_quobyte.py
  40. 1
    1
      manila_tempest_tests/common/remote_client.py
  41. 2
    2
      manila_tempest_tests/tests/api/base.py
  42. 1
    1
      manila_tempest_tests/tests/api/test_security_services.py
  43. 1
    1
      manila_tempest_tests/tests/api/test_security_services_mapping_negative.py
  44. 1
    1
      manila_tempest_tests/tests/api/test_security_services_negative.py
  45. 1
    1
      manila_tempest_tests/tests/scenario/manager_share.py
  46. 1
    1
      manila_tempest_tests/tests/scenario/test_share_basic_ops.py
  47. 5
    0
      tox.ini

+ 3
- 3
contrib/share_driver_hooks/zaqar_notification.py View File

@@ -78,7 +78,7 @@ class ZaqarNotification(hook.HookBase):
78 78
 
79 79
     def _execute_pre_hook(self, context, func_name, *args, **kwargs):
80 80
         LOG.debug("\n PRE zaqar notification has been called for "
81
-                  "method '%s'.\n" % func_name)
81
+                  "method '%s'.\n", func_name)
82 82
         if func_name == "deny_access":
83 83
             LOG.debug("\nSending notification about denied access.\n")
84 84
             data = self._access_changed_trigger(
@@ -92,7 +92,7 @@ class ZaqarNotification(hook.HookBase):
92 92
     def _execute_post_hook(self, context, func_name, pre_hook_data,
93 93
                            driver_action_results, *args, **kwargs):
94 94
         LOG.debug("\n POST zaqar notification has been called for "
95
-                  "method '%s'.\n" % func_name)
95
+                  "method '%s'.\n", func_name)
96 96
         if func_name == "allow_access":
97 97
             LOG.debug("\nSending notification about allowed access.\n")
98 98
             data = self._access_changed_trigger(
@@ -115,7 +115,7 @@ class ZaqarNotification(hook.HookBase):
115 115
             }
116 116
             LOG.debug(
117 117
                 "\n Sending message %(m)s to '%(q)s' queue using '%(u)s' user "
118
-                "and '%(p)s' project." % {
118
+                "and '%(p)s' project.", {
119 119
                     'm': message,
120 120
                     'q': queue_name,
121 121
                     'u': CONF.zaqar.zaqar_username,

+ 3
- 3
manila/api/openstack/wsgi.py View File

@@ -733,8 +733,8 @@ class Resource(wsgi.Application):
733 733
     def __call__(self, request):
734 734
         """WSGI method that controls (de)serialization and method dispatch."""
735 735
 
736
-        LOG.info("%(method)s %(url)s" % {"method": request.method,
737
-                                         "url": request.url})
736
+        LOG.info("%(method)s %(url)s", {"method": request.method,
737
+                                        "url": request.url})
738 738
         if self.support_api_request_version:
739 739
             # Set the version of the API requested based on the header
740 740
             try:
@@ -900,7 +900,7 @@ class Resource(wsgi.Application):
900 900
             # OK, it's an action; figure out which action...
901 901
             mtype = _MEDIA_TYPE_MAP.get(content_type)
902 902
             action_name = self.action_peek[mtype](body)
903
-            LOG.debug("Action body: %s" % body)
903
+            LOG.debug("Action body: %s", body)
904 904
         else:
905 905
             action_name = action
906 906
 

+ 1
- 1
manila/db/migrations/alembic/versions/3651e16d7c43_add_consistency_groups.py View File

@@ -182,7 +182,7 @@ def downgrade():
182 182
                            'shares',
183 183
                            type_='foreignkey')
184 184
     except Exception:
185
-        LOG.exception("Error Dropping '%s' constraint." %
185
+        LOG.exception("Error Dropping '%s' constraint.",
186 186
                       SHARES_CG_FK_CONSTRAINT_NAME)
187 187
 
188 188
     try:

+ 1
- 1
manila/db/migrations/alembic/versions/e1949a93157a_add_share_group_types_table.py View File

@@ -141,5 +141,5 @@ def downgrade():
141 141
         try:
142 142
             op.drop_table(table_name)
143 143
         except Exception:
144
-            LOG.error("%s table not dropped" % table_name)
144
+            LOG.error("%s table not dropped", table_name)
145 145
             raise

+ 3
- 3
manila/db/migrations/alembic/versions/e8ea58723178_remove_host_from_driver_private_data.py View File

@@ -76,7 +76,7 @@ def downgrade():
76 76
     )
77 77
 
78 78
     LOG.info("Copying data from %(from_table)s to the migration "
79
-             "table %(migration_table)s" % {
79
+             "table %(migration_table)s", {
80 80
                  'from_table': TABLE_NAME,
81 81
                  'migration_table': migration_table_name
82 82
              })
@@ -94,13 +94,13 @@ def downgrade():
94 94
         })
95 95
     op.bulk_insert(migration_table, rows)
96 96
 
97
-    LOG.info("Dropping table %(from_table)s" % {
97
+    LOG.info("Dropping table %(from_table)s", {
98 98
         'from_table': TABLE_NAME
99 99
     })
100 100
     op.drop_table(TABLE_NAME)
101 101
 
102 102
     LOG.info("Rename the migration table %(migration_table)s to "
103
-             "the original table %(from_table)s" % {
103
+             "the original table %(from_table)s", {
104 104
                  'migration_table': migration_table_name,
105 105
                  'from_table': TABLE_NAME
106 106
              })

+ 3
- 3
manila/message/api.py View File

@@ -45,7 +45,7 @@ class API(base.Base):
45 45
                resource_id=None, exception=None, detail=None,
46 46
                level=message_levels.ERROR):
47 47
         """Create a message with the specified information."""
48
-        LOG.info("Creating message record for request_id = %s" %
48
+        LOG.info("Creating message record for request_id = %s",
49 49
                  context.request_id)
50 50
 
51 51
         # Updates expiry time for message as per message_ttl config.
@@ -66,8 +66,8 @@ class API(base.Base):
66 66
         try:
67 67
             self.db.message_create(context, message_record)
68 68
         except Exception:
69
-            LOG.exception("Failed to create message record "
70
-                          "for request_id %s" % context.request_id)
69
+            LOG.exception(("Failed to create message record "
70
+                           "for request_id %s"), context.request_id)
71 71
 
72 72
     def get(self, context, id):
73 73
         """Return message with the specified message id."""

+ 4
- 4
manila/scheduler/drivers/filter.py View File

@@ -306,7 +306,7 @@ class FilterScheduler(base.Scheduler):
306 306
     def schedule_create_share_group(self, context, share_group_id,
307 307
                                     request_spec, filter_properties):
308 308
 
309
-        LOG.info("Scheduling share group %s." % share_group_id)
309
+        LOG.info("Scheduling share group %s.", share_group_id)
310 310
         host = self._get_best_host_for_share_group(context, request_spec)
311 311
 
312 312
         if not host:
@@ -314,7 +314,7 @@ class FilterScheduler(base.Scheduler):
314 314
             raise exception.NoValidHost(reason=msg)
315 315
 
316 316
         msg = "Chose host %(host)s for create_share_group %(group)s."
317
-        LOG.info(msg % {'host': host, 'group': share_group_id})
317
+        LOG.info(msg, {'host': host, 'group': share_group_id})
318 318
 
319 319
         updated_share_group = base.share_group_update_db(
320 320
             context, share_group_id, host)
@@ -361,7 +361,7 @@ class FilterScheduler(base.Scheduler):
361 361
         if not hosts:
362 362
             return []
363 363
 
364
-        LOG.debug("Filtered %s" % hosts)
364
+        LOG.debug("Filtered %s", hosts)
365 365
 
366 366
         # weighted_host = WeightedHost() ... the best host for the job.
367 367
         weighed_hosts = self.host_manager.get_weighed_hosts(
@@ -396,7 +396,7 @@ class FilterScheduler(base.Scheduler):
396 396
         if not hosts:
397 397
             return []
398 398
 
399
-        LOG.debug("Filtered %s" % hosts)
399
+        LOG.debug("Filtered %s", hosts)
400 400
 
401 401
         weighed_hosts = self.host_manager.get_weighed_hosts(
402 402
             hosts,

+ 3
- 3
manila/scheduler/filters/ignore_attempted_hosts.py View File

@@ -51,7 +51,7 @@ class IgnoreAttemptedHostsFilter(base_host.BaseHostFilter):
51 51
         pass_msg = "passes" if passes else "fails"
52 52
 
53 53
         LOG.debug("Host %(host)s %(pass_msg)s.  Previously tried hosts: "
54
-                  "%(hosts)s" % {'host': host,
55
-                                 'pass_msg': pass_msg,
56
-                                 'hosts': hosts})
54
+                  "%(hosts)s", {'host': host,
55
+                                'pass_msg': pass_msg,
56
+                                'hosts': hosts})
57 57
         return passes

+ 2
- 2
manila/scheduler/host_manager.py View File

@@ -562,7 +562,7 @@ class HostManager(object):
562 562
         self.service_states[host] = capability_copy
563 563
 
564 564
         LOG.debug("Received %(service_name)s service update from "
565
-                  "%(host)s: %(cap)s" %
565
+                  "%(host)s: %(cap)s",
566 566
                   {'service_name': service_name, 'host': host,
567 567
                    'cap': capabilities})
568 568
 
@@ -578,7 +578,7 @@ class HostManager(object):
578 578
 
579 579
             # Warn about down services and remove them from host_state_map
580 580
             if not utils.service_is_up(service) or service['disabled']:
581
-                LOG.warning("Share service is down. (host: %s)." % host)
581
+                LOG.warning("Share service is down. (host: %s).", host)
582 582
                 continue
583 583
 
584 584
             # Create and register host_state if not in host_state_map

+ 1
- 1
manila/service.py View File

@@ -293,7 +293,7 @@ class WSGIService(service.ServiceBase):
293 293
         if self.workers is not None and self.workers < 1:
294 294
             LOG.warning(
295 295
                 "Value of config option %(name)s_workers must be integer "
296
-                "greater than 1.  Input value ignored." % {'name': name})
296
+                "greater than 1.  Input value ignored.", {'name': name})
297 297
             # Reset workers to default
298 298
             self.workers = None
299 299
         self.server = wsgi.Server(

+ 4
- 4
manila/share/api.py View File

@@ -1323,7 +1323,7 @@ class API(base.Base):
1323 1323
                 constants.TASK_STATE_DATA_COPYING_IN_PROGRESS):
1324 1324
             data_rpc = data_rpcapi.DataAPI()
1325 1325
             LOG.info("Sending request to get share migration information"
1326
-                     " of share %s." % share['id'])
1326
+                     " of share %s.", share['id'])
1327 1327
 
1328 1328
             services = self.db.service_get_all_by_topic(context, 'manila-data')
1329 1329
 
@@ -1423,7 +1423,7 @@ class API(base.Base):
1423 1423
 
1424 1424
             data_rpc = data_rpcapi.DataAPI()
1425 1425
             LOG.info("Sending request to cancel migration of "
1426
-                     "share %s." % share['id'])
1426
+                     "share %s.", share['id'])
1427 1427
 
1428 1428
             services = self.db.service_get_all_by_topic(context, 'manila-data')
1429 1429
 
@@ -1873,8 +1873,8 @@ class API(base.Base):
1873 1873
         self.update(context, share, {'status': constants.STATUS_SHRINKING})
1874 1874
         self.share_rpcapi.shrink_share(context, share, new_size)
1875 1875
         LOG.info("Shrink share (id=%(id)s) request issued successfully."
1876
-                 " New size: %(size)s" % {'id': share['id'],
1877
-                                          'size': new_size})
1876
+                 " New size: %(size)s", {'id': share['id'],
1877
+                                         'size': new_size})
1878 1878
 
1879 1879
     def snapshot_allow_access(self, context, snapshot, access_type, access_to):
1880 1880
         """Allow access to a share snapshot."""

+ 4
- 4
manila/share/driver.py View File

@@ -1310,7 +1310,7 @@ class ShareDriver(object):
1310 1310
         except exception.ManilaException:
1311 1311
             msg = ('Could not delete share group snapshot member %(snap)s '
1312 1312
                    'for share %(share)s.')
1313
-            LOG.error(msg % {
1313
+            LOG.error(msg, {
1314 1314
                 'snap': share_snapshot['id'],
1315 1315
                 'share': share_snapshot['share_id'],
1316 1316
             })
@@ -1402,7 +1402,7 @@ class ShareDriver(object):
1402 1402
                     msg = ('Could not create share group snapshot. Failed '
1403 1403
                            'to create share snapshot %(snap)s for '
1404 1404
                            'share %(share)s.')
1405
-                    LOG.exception(msg % {
1405
+                    LOG.exception(msg, {
1406 1406
                         'snap': share_snapshot['id'],
1407 1407
                         'share': share_snapshot['share_id']
1408 1408
                     })
@@ -1466,7 +1466,7 @@ class ShareDriver(object):
1466 1466
             This value may be None.
1467 1467
         """
1468 1468
         snapshot_members = snap_dict.get('share_group_snapshot_members', [])
1469
-        LOG.debug('Deleting share group snapshot %s.' % snap_dict['id'])
1469
+        LOG.debug('Deleting share group snapshot %s.', snap_dict['id'])
1470 1470
         for member in snapshot_members:
1471 1471
             share_snapshot = {
1472 1472
                 'snapshot_id': member['share_group_snapshot_id'],
@@ -1482,7 +1482,7 @@ class ShareDriver(object):
1482 1482
             self.delete_snapshot(
1483 1483
                 context, share_snapshot, share_server=share_server)
1484 1484
 
1485
-        LOG.debug('Deleted share group snapshot %s.' % snap_dict['id'])
1485
+        LOG.debug('Deleted share group snapshot %s.', snap_dict['id'])
1486 1486
         return None, None
1487 1487
 
1488 1488
     def _collate_share_group_snapshot_info(self, share_group_dict,

+ 4
- 4
manila/share/drivers/container/driver.py View File

@@ -117,7 +117,7 @@ class ContainerShareDriver(driver.ShareDriver, driver.ExecuteMixin):
117 117
         super(ContainerShareDriver, self)._update_share_stats(data)
118 118
 
119 119
     def create_share(self, context, share, share_server=None):
120
-        LOG.debug("Create share on server '%s'." % share_server["id"])
120
+        LOG.debug("Create share on server '%s'.", share_server["id"])
121 121
         server_id = self._get_container_name(share_server["id"])
122 122
         share_name = share.share_id
123 123
         self.container.execute(
@@ -135,7 +135,7 @@ class ContainerShareDriver(driver.ShareDriver, driver.ExecuteMixin):
135 135
 
136 136
     @utils.synchronized('container_driver_delete_share_lock', external=True)
137 137
     def delete_share(self, context, share, share_server=None):
138
-        LOG.debug("Deleting share %(share)s on server '%(server)s'." %
138
+        LOG.debug("Deleting share %(share)s on server '%(server)s'.",
139 139
                   {"server": share_server["id"],
140 140
                    "share": share.share_id})
141 141
         server_id = self._get_container_name(share_server["id"])
@@ -255,7 +255,7 @@ class ContainerShareDriver(driver.ShareDriver, driver.ExecuteMixin):
255 255
                       self.configuration.container_ovs_bridge_name, host_veth,
256 256
                       *(e_mac + e_id + e_status + e_mcid), run_as_root=True)
257 257
         LOG.debug("Now container %(id)s should be accessible from network "
258
-                  "%(network)s and subnet %(subnet)s by address %(ip)s." %
258
+                  "%(network)s and subnet %(subnet)s by address %(ip)s.",
259 259
                   msg_helper)
260 260
 
261 261
     @utils.synchronized("container_driver_teardown_lock", external=True)
@@ -305,7 +305,7 @@ class ContainerShareDriver(driver.ShareDriver, driver.ExecuteMixin):
305 305
     def _setup_server(self, network_info, metadata=None):
306 306
         msg = "Creating share server '%s'."
307 307
         server_id = self._get_container_name(network_info["server_id"])
308
-        LOG.debug(msg % server_id)
308
+        LOG.debug(msg, server_id)
309 309
 
310 310
         veths_before = self._get_veth_state()
311 311
         try:

+ 1
- 1
manila/share/drivers/container/protocol_helper.py View File

@@ -110,7 +110,7 @@ class DockerCIFSHelper(object):
110 110
             existing_users = self._get_existing_users(server_id, share_name,
111 111
                                                       access)
112 112
         except TypeError:
113
-            LOG.warning("Can't access smbd at share %s." % share_name)
113
+            LOG.warning("Can't access smbd at share %s.", share_name)
114 114
             return
115 115
         else:
116 116
             allowed_users = " ".join(sorted(set(existing_users.split()) -

+ 1
- 1
manila/share/drivers/container/storage_helper.py View File

@@ -94,7 +94,7 @@ class LVMHelper(driver.ExecuteMixin):
94 94
             self._execute("lvremove", "-f", "--autobackup", "n",
95 95
                           to_remove, run_as_root=True)
96 96
         except exception.ProcessExecutionError as e:
97
-            LOG.warning("Failed to remove logical volume %s." % to_remove)
97
+            LOG.warning("Failed to remove logical volume %s.", to_remove)
98 98
             LOG.error(e)
99 99
 
100 100
     def extend_share(self, share, new_size, share_server=None):

+ 2
- 2
manila/share/drivers/generic.py View File

@@ -873,7 +873,7 @@ class GenericShareDriver(driver.ExecuteMixin, driver.ShareDriver):
873 873
 
874 874
     def _setup_server(self, network_info, metadata=None):
875 875
         msg = "Creating share server '%s'."
876
-        LOG.debug(msg % network_info['server_id'])
876
+        LOG.debug(msg, network_info['server_id'])
877 877
         server = self.service_instance_manager.set_up_service_instance(
878 878
             self.admin_context, network_info)
879 879
         for helper in self._helpers.values():
@@ -945,7 +945,7 @@ class GenericShareDriver(driver.ExecuteMixin, driver.ShareDriver):
945 945
 
946 946
             linked_volume_name = self._get_volume_name(share['id'])
947 947
             if share_volume['name'] != linked_volume_name:
948
-                LOG.debug('Manage: volume_id = %s' % share_volume['id'])
948
+                LOG.debug('Manage: volume_id = %s', share_volume['id'])
949 949
                 self.volume_api.update(self.admin_context, share_volume['id'],
950 950
                                        {'name': linked_volume_name})
951 951
 

+ 1
- 1
manila/share/drivers/glusterfs/common.py View File

@@ -205,7 +205,7 @@ class GlusterManager(object):
205 205
                       exc.exit_code in error_policy):
206 206
                     return
207 207
                 if logmsg:
208
-                    LOG.error("%s: GlusterFS instrumentation failed." %
208
+                    LOG.error("%s: GlusterFS instrumentation failed.",
209 209
                               logmsg)
210 210
                 raise exception.GlusterfsException(
211 211
                     _("GlusterFS management command '%(cmd)s' failed "

+ 4
- 4
manila/share/drivers/helpers.py View File

@@ -266,9 +266,9 @@ class NFSHelper(NASHelperBase):
266 266
                     LOG.warning(
267 267
                         "Unsupported access level %(level)s or access type "
268 268
                         "%(type)s, skipping removal of access rule to "
269
-                        "%(to)s." % {'level': access['access_level'],
270
-                                     'type': access['access_type'],
271
-                                     'to': access['access_to']})
269
+                        "%(to)s.", {'level': access['access_level'],
270
+                                    'type': access['access_type'],
271
+                                    'to': access['access_to']})
272 272
                     continue
273 273
                 self._ssh_exec(server, ['sudo', 'exportfs', '-u',
274 274
                                ':'.join((access['access_to'], local_path))])
@@ -282,7 +282,7 @@ class NFSHelper(NASHelperBase):
282 282
                         access['access_to']), out)
283 283
                 if found_item is not None:
284 284
                     LOG.warning("Access rule %(type)s:%(to)s already "
285
-                                "exists for share %(name)s" % {
285
+                                "exists for share %(name)s", {
286 286
                                     'to': access['access_to'],
287 287
                                     'type': access['access_type'],
288 288
                                     'name': share_name

+ 1
- 1
manila/share/drivers/hitachi/hnas/ssh.py View File

@@ -313,7 +313,7 @@ class HNASSSHBackend(object):
313 313
                         command = ['tree-clone-job-abort', job_id]
314 314
                         self._execute(command)
315 315
                         LOG.error("Timeout in snapshot creation from "
316
-                                  "source path %s." % src_path)
316
+                                  "source path %s.", src_path)
317 317
                         msg = _("Share snapshot of source path %s "
318 318
                                 "was not created.") % src_path
319 319
                         raise exception.HNASBackendException(msg=msg)

+ 1
- 1
manila/share/drivers/huawei/v3/helper.py View File

@@ -214,7 +214,7 @@ class RestHelper(object):
214 214
             utils.execute('chmod', '666', filepath, run_as_root=True)
215 215
 
216 216
         except Exception as err:
217
-            LOG.error('Bad response from change file: %s.' % err)
217
+            LOG.error('Bad response from change file: %s.', err)
218 218
             raise
219 219
 
220 220
     def create_share(self, share_name, fs_id, share_proto):

+ 2
- 2
manila/share/drivers/ibm/gpfs.py View File

@@ -226,7 +226,7 @@ class GPFSShareDriver(driver.ExecuteMixin, driver.GaneshaMixin,
226 226
 
227 227
         # exit_status == -1 if no exit code was returned
228 228
         if exit_status != -1:
229
-            LOG.debug('Result was %s' % exit_status)
229
+            LOG.debug('Result was %s', exit_status)
230 230
             if ((check_exit_code and exit_status != 0)
231 231
                 and
232 232
                 (ignore_exit_code is None or
@@ -692,7 +692,7 @@ class GPFSShareDriver(driver.ExecuteMixin, driver.GaneshaMixin,
692 692
                 LOG.exception(msg)
693 693
                 raise exception.GPFSException(msg)
694 694
             LOG.info('Existing share %(shr)s has size %(size)s KB '
695
-                     'which is below 1GiB, so extended it to 1GiB.' %
695
+                     'which is below 1GiB, so extended it to 1GiB.',
696 696
                      {'shr': new_share_name, 'size': share_size})
697 697
             share_size = 1
698 698
         else:

+ 1
- 1
manila/share/drivers/lvm.py View File

@@ -121,7 +121,7 @@ class LVMMixin(driver.ExecuteMixin):
121 121
             if "not found" not in exc.stderr:
122 122
                 LOG.exception("Error deleting volume")
123 123
                 raise
124
-            LOG.warning("Volume not found: %s" % exc.stderr)
124
+            LOG.warning("Volume not found: %s", exc.stderr)
125 125
 
126 126
     def _create_snapshot(self, context, snapshot):
127 127
         """Creates a snapshot."""

+ 2
- 2
manila/share/drivers/netapp/common.py View File

@@ -108,7 +108,7 @@ class NetAppDriver(object):
108 108
 
109 109
         fmt = {'storage_family': storage_family, 'driver_mode': driver_mode}
110 110
         LOG.info('Requested unified config: %(storage_family)s and '
111
-                 '%(driver_mode)s.' % fmt)
111
+                 '%(driver_mode)s.', fmt)
112 112
 
113 113
         family_meta = NETAPP_UNIFIED_DRIVER_REGISTRY.get(storage_family)
114 114
         if family_meta is None:
@@ -125,5 +125,5 @@ class NetAppDriver(object):
125 125
         kwargs['netapp_mode'] = 'proxy'
126 126
         driver = importutils.import_object(driver_loc, *args, **kwargs)
127 127
         LOG.info('NetApp driver of family %(storage_family)s and mode '
128
-                 '%(driver_mode)s loaded.' % fmt)
128
+                 '%(driver_mode)s loaded.', fmt)
129 129
         return driver

+ 4
- 4
manila/share/drivers/netapp/dataontap/client/client_cmode.py View File

@@ -2649,7 +2649,7 @@ class NetAppCmodeClient(client_base.NetAppBaseClient):
2649 2649
                 try:
2650 2650
                     client.delete_nfs_export_policy(policy)
2651 2651
                 except netapp_api.NaApiError:
2652
-                    LOG.debug('Could not delete export policy %s.' % policy)
2652
+                    LOG.debug('Could not delete export policy %s.', policy)
2653 2653
 
2654 2654
     @na_utils.trace
2655 2655
     def _get_deleted_nfs_export_policies(self):
@@ -2717,7 +2717,7 @@ class NetAppCmodeClient(client_base.NetAppBaseClient):
2717 2717
             node_client.send_request('ems-autosupport-log', message_dict)
2718 2718
             LOG.debug('EMS executed successfully.')
2719 2719
         except netapp_api.NaApiError as e:
2720
-            LOG.warning('Failed to invoke EMS. %s' % e)
2720
+            LOG.warning('Failed to invoke EMS. %s', e)
2721 2721
 
2722 2722
     @na_utils.trace
2723 2723
     def get_aggregate(self, aggregate_name):
@@ -2741,7 +2741,7 @@ class NetAppCmodeClient(client_base.NetAppBaseClient):
2741 2741
                                          desired_attributes=desired_attributes)
2742 2742
         except netapp_api.NaApiError:
2743 2743
             msg = _('Failed to get info for aggregate %s.')
2744
-            LOG.exception(msg % aggregate_name)
2744
+            LOG.exception(msg, aggregate_name)
2745 2745
             return {}
2746 2746
 
2747 2747
         if len(aggrs) < 1:
@@ -2814,7 +2814,7 @@ class NetAppCmodeClient(client_base.NetAppBaseClient):
2814 2814
             result = self.send_iter_request('storage-disk-get-iter', api_args)
2815 2815
         except netapp_api.NaApiError:
2816 2816
             msg = _('Failed to get disk info for aggregate %s.')
2817
-            LOG.exception(msg % aggregate_name)
2817
+            LOG.exception(msg, aggregate_name)
2818 2818
             return disk_types
2819 2819
 
2820 2820
         attributes_list = result.get_child_by_name(

+ 3
- 3
manila/share/drivers/netapp/dataontap/cluster_mode/lib_base.py View File

@@ -173,7 +173,7 @@ class NetAppCmodeFileStorageLibrary(object):
173 173
         if 'nfs' not in self._licenses and 'cifs' not in self._licenses:
174 174
             msg = 'Neither NFS nor CIFS is licensed on %(backend)s'
175 175
             msg_args = {'backend': self._backend_name}
176
-            LOG.error(msg % msg_args)
176
+            LOG.error(msg, msg_args)
177 177
 
178 178
         return self._licenses
179 179
 
@@ -1064,7 +1064,7 @@ class NetAppCmodeFileStorageLibrary(object):
1064 1064
             msg = _('Could not determine snapshot %(snap)s size from '
1065 1065
                     'volume %(vol)s.')
1066 1066
             msg_args = {'snap': existing_snapshot_name, 'vol': share_name}
1067
-            LOG.exception(msg % msg_args)
1067
+            LOG.exception(msg, msg_args)
1068 1068
             raise exception.ShareNotFound(share_id=snapshot['share_id'])
1069 1069
 
1070 1070
         # Ensure there aren't any mirrors on this volume
@@ -2139,7 +2139,7 @@ class NetAppCmodeFileStorageLibrary(object):
2139 2139
             math.ceil(float(backend_volume['size']) / units.Gi))
2140 2140
 
2141 2141
         LOG.debug("Checking for a pre-existing QoS policy group that "
2142
-                  "is exclusive to the volume %s." % backend_share_name)
2142
+                  "is exclusive to the volume %s.", backend_share_name)
2143 2143
 
2144 2144
         # Does the volume have an exclusive QoS policy that we can rename?
2145 2145
         if backend_volume['qos-policy-group-name'] is not None:

+ 1
- 1
manila/share/drivers/netapp/dataontap/cluster_mode/lib_multi_svm.py View File

@@ -247,7 +247,7 @@ class NetAppCmodeMultiSVMFileStorageLibrary(
247 247
 
248 248
         network_allocations = network_info.get('admin_network_allocations')
249 249
         if not network_allocations:
250
-            LOG.info('No admin network defined for Vserver %s.' %
250
+            LOG.info('No admin network defined for Vserver %s.',
251 251
                      vserver_name)
252 252
             return
253 253
 

+ 1
- 1
manila/share/drivers/netapp/dataontap/cluster_mode/lib_single_svm.py View File

@@ -77,7 +77,7 @@ class NetAppCmodeSingleSVMFileStorageLibrary(
77 77
         msg_args = {'vserver': self._vserver, 'backend': self._backend_name}
78 78
         msg_args['creds'] = ('cluster' if self._have_cluster_creds
79 79
                              else 'Vserver')
80
-        LOG.info(msg % msg_args)
80
+        LOG.info(msg, msg_args)
81 81
 
82 82
         (super(NetAppCmodeSingleSVMFileStorageLibrary, self).
83 83
             check_for_setup_error())

+ 3
- 3
manila/share/drivers/netapp/dataontap/protocols/nfs_cmode.py View File

@@ -89,14 +89,14 @@ class NetAppCmodeNFSHelper(base.NetAppBaseHelper):
89 89
 
90 90
         # Rename policy currently in force
91 91
         LOG.info('Renaming NFS export policy for share %(share)s to '
92
-                 '%(policy)s.' %
92
+                 '%(policy)s.',
93 93
                  {'share': share_name, 'policy': temp_old_export_policy_name})
94 94
         self._client.rename_nfs_export_policy(export_policy_name,
95 95
                                               temp_old_export_policy_name)
96 96
 
97 97
         # Switch share to the new policy
98 98
         LOG.info('Setting NFS export policy for share %(share)s to '
99
-                 '%(policy)s.' %
99
+                 '%(policy)s.',
100 100
                  {'share': share_name, 'policy': temp_new_export_policy_name})
101 101
         self._client.set_nfs_export_policy_for_volume(
102 102
             share_name, temp_new_export_policy_name)
@@ -106,7 +106,7 @@ class NetAppCmodeNFSHelper(base.NetAppBaseHelper):
106 106
 
107 107
         # Rename new policy to its final name
108 108
         LOG.info('Renaming NFS export policy for share %(share)s to '
109
-                 '%(policy)s.' %
109
+                 '%(policy)s.',
110 110
                  {'share': share_name, 'policy': export_policy_name})
111 111
         self._client.rename_nfs_export_policy(temp_new_export_policy_name,
112 112
                                               export_policy_name)

+ 5
- 5
manila/share/drivers/netapp/utils.py View File

@@ -74,7 +74,7 @@ def setup_tracing(trace_flags_string):
74 74
         flags = trace_flags_string.split(',')
75 75
         flags = [flag.strip() for flag in flags]
76 76
         for invalid_flag in list(set(flags) - set(VALID_TRACE_FLAGS)):
77
-            LOG.warning('Invalid trace flag: %s' % invalid_flag)
77
+            LOG.warning('Invalid trace flag: %s', invalid_flag)
78 78
         TRACE_METHOD = 'method' in flags
79 79
         TRACE_API = 'api' in flags
80 80
 
@@ -164,7 +164,7 @@ class OpenStackInfo(object):
164 164
                                       "'%{version}\t%{release}\t%{vendor}'",
165 165
                                       self.PACKAGE_NAME)
166 166
             if not out:
167
-                LOG.info('No rpm info found for %(pkg)s package.' % {
167
+                LOG.info('No rpm info found for %(pkg)s package.', {
168 168
                     'pkg': self.PACKAGE_NAME})
169 169
                 return False
170 170
             parts = out.split()
@@ -173,7 +173,7 @@ class OpenStackInfo(object):
173 173
             self._vendor = ' '.join(parts[2::])
174 174
             return True
175 175
         except Exception as e:
176
-            LOG.info('Could not run rpm command: %(msg)s.' % {
176
+            LOG.info('Could not run rpm command: %(msg)s.', {
177 177
                 'msg': e})
178 178
             return False
179 179
 
@@ -186,7 +186,7 @@ class OpenStackInfo(object):
186 186
                                       self.PACKAGE_NAME)
187 187
             if not out:
188 188
                 LOG.info(
189
-                    'No dpkg-query info found for %(pkg)s package.' % {
189
+                    'No dpkg-query info found for %(pkg)s package.', {
190 190
                         'pkg': self.PACKAGE_NAME})
191 191
                 return False
192 192
             # Debian format: [epoch:]upstream_version[-debian_revision]
@@ -204,7 +204,7 @@ class OpenStackInfo(object):
204 204
                 self._vendor = _vendor
205 205
             return True
206 206
         except Exception as e:
207
-            LOG.info('Could not run dpkg-query command: %(msg)s.' % {
207
+            LOG.info('Could not run dpkg-query command: %(msg)s.', {
208 208
                 'msg': e})
209 209
             return False
210 210
 

+ 1
- 1
manila/share/drivers/quobyte/jsonrpc.py View File

@@ -97,7 +97,7 @@ class JsonRpc(object):
97 97
             return self._checked_for_application_error(response)
98 98
 
99 99
         # If things did not work out provide error info
100
-        LOG.debug("Backend request resulted in error: %s" % result.text)
100
+        LOG.debug("Backend request resulted in error: %s", result.text)
101 101
         result.raise_for_status()
102 102
 
103 103
     def _checked_for_application_error(self, result):

+ 4
- 4
manila/share/drivers/quobyte/quobyte.py View File

@@ -245,9 +245,9 @@ class QuobyteShareDriver(driver.ExecuteMixin, driver.ShareDriver,):
245 245
             self._get_project_name(context, share['project_id']))
246 246
         if not volume_uuid:
247 247
             LOG.warning("No volume found for "
248
-                        "share %(project_id)s/%(name)s"
249
-                        % {"project_id": share['project_id'],
250
-                           "name": share['name']})
248
+                        "share %(project_id)s/%(name)s",
249
+                        {"project_id": share['project_id'],
250
+                         "name": share['name']})
251 251
             return
252 252
 
253 253
         if self.configuration.quobyte_delete_shares:
@@ -274,7 +274,7 @@ class QuobyteShareDriver(driver.ExecuteMixin, driver.ShareDriver,):
274 274
             share['name'],
275 275
             self._get_project_name(context, share['project_id']))
276 276
 
277
-        LOG.debug("Ensuring Quobyte share %s" % share['name'])
277
+        LOG.debug("Ensuring Quobyte share %s", share['name'])
278 278
 
279 279
         if not volume_uuid:
280 280
             raise (exception.ShareResourceNotFound(

+ 2
- 2
manila/share/drivers/service_instance.py View File

@@ -642,7 +642,7 @@ class ServiceInstanceManager(object):
642 642
                 break
643 643
 
644 644
             LOG.debug("Waiting for instance %(instance_id)s to be active. "
645
-                      "Current status: %(instance_status)s." %
645
+                      "Current status: %(instance_status)s.",
646 646
                       dict(instance_id=instance_id,
647 647
                            instance_status=instance_status))
648 648
             time.sleep(1)
@@ -960,7 +960,7 @@ class NeutronNetworkHelper(BaseNetworkhelper):
960 960
         except Exception as e:
961 961
             if 'does not exist' in six.text_type(e):
962 962
                 LOG.warning(
963
-                    "Device %s does not exist anymore." % device.name)
963
+                    "Device %s does not exist anymore.", device.name)
964 964
             else:
965 965
                 raise
966 966
         for addr in addr_list:

+ 3
- 3
manila/share/drivers/windows/windows_smb_helper.py View File

@@ -209,9 +209,9 @@ class WindowsSMBHelper(helpers.CIFSHelperBase):
209 209
                 LOG.warning(
210 210
                     "Unsupported access level %(level)s or access type "
211 211
                     "%(type)s, skipping removal of access rule to "
212
-                    "%(to)s." % {'level': deleted_rule['access_level'],
213
-                                 'type': deleted_rule['access_type'],
214
-                                 'to': deleted_rule['access_to']})
212
+                    "%(to)s.", {'level': deleted_rule['access_level'],
213
+                                'type': deleted_rule['access_type'],
214
+                                'to': deleted_rule['access_to']})
215 215
                 continue
216 216
             self._revoke_share_access(server, share_name,
217 217
                                       deleted_rule['access_to'])

+ 1
- 1
manila/share/drivers/zfsonlinux/driver.py View File

@@ -1550,5 +1550,5 @@ class ZFSonLinuxShareDriver(zfs_utils.ExecuteMixin, driver.ShareDriver):
1550 1550
                 e)
1551 1551
 
1552 1552
         LOG.debug(
1553
-            "Migration of share with ID '%s' has been canceled." %
1553
+            "Migration of share with ID '%s' has been canceled.",
1554 1554
             source_share["id"])

+ 2
- 2
manila/share/manager.py View File

@@ -1956,7 +1956,7 @@ class ShareManager(manager.SchedulerDependentManager):
1956 1956
                     msg = _("The driver was unable to delete access rules "
1957 1957
                             "for the replica: %s. Will attempt to delete "
1958 1958
                             "the replica anyway.")
1959
-                    LOG.exception(msg % share_replica['id'])
1959
+                    LOG.exception(msg, share_replica['id'])
1960 1960
                     exc_context.reraise = False
1961 1961
 
1962 1962
         try:
@@ -3265,7 +3265,7 @@ class ShareManager(manager.SchedulerDependentManager):
3265 3265
         share_server = self._get_share_server(context, share_instance)
3266 3266
 
3267 3267
         LOG.debug("Received request to update access for share instance"
3268
-                  " %s." % share_instance_id)
3268
+                  " %s.", share_instance_id)
3269 3269
 
3270 3270
         self.access_helper.update_access_rules(
3271 3271
             context,

+ 1
- 1
manila/tests/fake_utils.py View File

@@ -77,7 +77,7 @@ def fake_execute(*cmd_parts, **kwargs):
77 77
     for fake_replier in _fake_execute_repliers:
78 78
         if re.match(fake_replier[0], cmd_str):
79 79
             reply_handler = fake_replier[1]
80
-            LOG.debug('Faked command matched %s' % fake_replier[0])
80
+            LOG.debug('Faked command matched %s', fake_replier[0])
81 81
             break
82 82
 
83 83
     if isinstance(reply_handler, six.string_types):

+ 2
- 1
manila/tests/message/test_api.py View File

@@ -71,7 +71,8 @@ class MessageApiTest(test.TestCase):
71 71
         self.message_api.db.message_create.assert_called_once_with(
72 72
             self.ctxt, mock.ANY)
73 73
         exception_log.assert_called_once_with(
74
-            'Failed to create message record for request_id fakerequestid')
74
+            'Failed to create message record for request_id %s',
75
+            self.ctxt.request_id)
75 76
 
76 77
     def test_get(self):
77 78
         self.message_api.get(self.ctxt, 'fake_id')

+ 8
- 8
manila/tests/share/drivers/dummy.py View File

@@ -416,13 +416,13 @@ class DummyDriver(driver.ShareDriver):
416 416
     @slow_me_down
417 417
     def create_cgsnapshot(self, context, snap_dict, share_server=None):
418 418
         """Create a consistency group snapshot."""
419
-        LOG.debug("Successfully created CG snapshot %s." % snap_dict["id"])
419
+        LOG.debug("Successfully created CG snapshot %s.", snap_dict["id"])
420 420
         return None, None
421 421
 
422 422
     @slow_me_down
423 423
     def delete_cgsnapshot(self, context, snap_dict, share_server=None):
424 424
         """Delete a consistency group snapshot."""
425
-        LOG.debug("Successfully deleted CG snapshot %s." % snap_dict["id"])
425
+        LOG.debug("Successfully deleted CG snapshot %s.", snap_dict["id"])
426 426
         return None, None
427 427
 
428 428
     @slow_me_down
@@ -545,7 +545,7 @@ class DummyDriver(driver.ShareDriver):
545 545
 
546 546
         """
547 547
         LOG.debug(
548
-            "Migration of dummy share with ID '%s' has been started." %
548
+            "Migration of dummy share with ID '%s' has been started.",
549 549
             source_share["id"])
550 550
         self.migration_progress[source_share['share_id']] = 0
551 551
 
@@ -561,9 +561,9 @@ class DummyDriver(driver.ShareDriver):
561 561
         self.migration_progress[source_share["id"]] += 50
562 562
 
563 563
         LOG.debug(
564
-            "Migration of dummy share with ID '%s' is continuing, %s." %
565
-            (source_share["id"],
566
-             self.migration_progress[source_share["id"]]))
564
+            "Migration of dummy share with ID '%s' is continuing, %s.",
565
+            source_share["id"],
566
+            self.migration_progress[source_share["id"]])
567 567
 
568 568
         return self.migration_progress[source_share["id"]] == 100
569 569
 
@@ -596,7 +596,7 @@ class DummyDriver(driver.ShareDriver):
596 596
             }
597 597
         )
598 598
         LOG.debug(
599
-            "Migration of dummy share with ID '%s' has been completed." %
599
+            "Migration of dummy share with ID '%s' has been completed.",
600 600
             source_share_ref["id"])
601 601
         self.migration_progress.pop(source_share_ref["id"], None)
602 602
 
@@ -610,7 +610,7 @@ class DummyDriver(driver.ShareDriver):
610 610
             destination_share_server=None):
611 611
         """Is called to cancel driver migration."""
612 612
         LOG.debug(
613
-            "Migration of dummy share with ID '%s' has been canceled." %
613
+            "Migration of dummy share with ID '%s' has been canceled.",
614 614
             source_share["id"])
615 615
         self.migration_progress.pop(source_share["id"], None)
616 616
 

+ 2
- 1
manila/tests/share/drivers/quobyte/test_quobyte.py View File

@@ -192,7 +192,8 @@ class QuobyteShareDriverTestCase(test.TestCase):
192 192
         self._driver.delete_share(self._context, self.share)
193 193
 
194 194
         mock_warning.assert_called_with(
195
-            'No volume found for share fake_project_uuid/fakename')
195
+            'No volume found for share %(project_id)s/%(name)s',
196
+            {'project_id': 'fake_project_uuid', 'name': 'fakename'})
196 197
 
197 198
     def test_allow_access(self):
198 199
         def rpc_handler(name, *args):

+ 1
- 1
manila_tempest_tests/common/remote_client.py View File

@@ -80,7 +80,7 @@ class RemoteClient(object):
80 80
         # Shell options below add more clearness on failures,
81 81
         # path is extended for some non-cirros guest oses (centos7)
82 82
         cmd = CONF.validation.ssh_shell_prologue + " " + cmd
83
-        LOG.debug("Remote command: %s" % cmd)
83
+        LOG.debug("Remote command: %s", cmd)
84 84
         return self.ssh_client.exec_command(cmd)
85 85
 
86 86
     @debug_ssh

+ 2
- 2
manila_tempest_tests/tests/api/base.py View File

@@ -91,7 +91,7 @@ class handle_cleanup_exceptions(object):
91 91
             return False  # Do not suppress error if any
92 92
         if exc_traceback:
93 93
             LOG.error("Suppressed cleanup error in Manila: "
94
-                      "\n%s" % traceback.format_exc())
94
+                      "\n%s", traceback.format_exc())
95 95
         return True  # Suppress error if any
96 96
 
97 97
 
@@ -925,7 +925,7 @@ class BaseSharesTest(test.BaseTestCase):
925 925
                         client.wait_for_resource_deletion(replica_id=res_id)
926 926
                     else:
927 927
                         LOG.warning("Provided unsupported resource type for "
928
-                                    "cleanup '%s'. Skipping." % res["type"])
928
+                                    "cleanup '%s'. Skipping.", res["type"])
929 929
                 res["deleted"] = True
930 930
 
931 931
     @classmethod

+ 1
- 1
manila_tempest_tests/tests/api/test_security_services.py View File

@@ -183,7 +183,7 @@ class SecurityServicesTest(base.BaseSharesTest,
183 183
             LOG.warning("Caught exception. It is expected in case backend "
184 184
                         "fails having security-service with improper data "
185 185
                         "that leads to share-server creation error. "
186
-                        "%s" % six.text_type(e))
186
+                        "%s", six.text_type(e))
187 187
 
188 188
         update_data = {
189 189
             "name": "name",

+ 1
- 1
manila_tempest_tests/tests/api/test_security_services_mapping_negative.py View File

@@ -115,7 +115,7 @@ class SecServicesMappingNegativeTest(base.BaseSharesTest):
115 115
             LOG.warning("Caught exception. It is expected in case backend "
116 116
                         "fails having security-service with improper data "
117 117
                         "that leads to share-server creation error. "
118
-                        "%s" % six.text_type(e))
118
+                        "%s", six.text_type(e))
119 119
 
120 120
         self.assertRaises(lib_exc.Forbidden,
121 121
                           self.cl.remove_sec_service_from_share_network,

+ 1
- 1
manila_tempest_tests/tests/api/test_security_services_negative.py View File

@@ -101,7 +101,7 @@ class SecurityServicesNegativeTest(base.BaseSharesTest):
101 101
             LOG.warning("Caught exception. It is expected in case backend "
102 102
                         "fails having security-service with improper data "
103 103
                         "that leads to share-server creation error. "
104
-                        "%s" % six.text_type(e))
104
+                        "%s", six.text_type(e))
105 105
 
106 106
         self.assertRaises(lib_exc.Forbidden,
107 107
                           self.shares_client.update_security_service,

+ 1
- 1
manila_tempest_tests/tests/scenario/manager_share.py View File

@@ -228,7 +228,7 @@ class ShareScenarioTest(manager.NetworkScenarioTest):
228 228
         try:
229 229
             linux_client.validate_authentication()
230 230
         except Exception:
231
-            LOG.exception('Initializing SSH connection to %s failed' % ip)
231
+            LOG.exception('Initializing SSH connection to %s failed', ip)
232 232
             self._log_console_output()
233 233
             raise
234 234
 

+ 1
- 1
manila_tempest_tests/tests/scenario/test_share_basic_ops.py View File

@@ -189,7 +189,7 @@ class ShareBasicOpsBase(manager.ShareScenarioTest):
189 189
                 first_address = net_addresses.values()[0][0]
190 190
                 ip = first_address['addr']
191 191
             except Exception:
192
-                LOG.debug("Instance: %s" % instance)
192
+                LOG.debug("Instance: %s", instance)
193 193
                 # In case on an error ip will be still none
194 194
                 LOG.exception("Instance does not have a valid IP address."
195 195
                               "Falling back to default")

+ 5
- 0
tox.ini View File

@@ -112,6 +112,11 @@ commands = alembic -c manila/db/migrations/alembic.ini revision -m ""{posargs}
112 112
 # Following checks are ignored on purpose:
113 113
 ignore = 
114 114
 builtins = _
115
+# [H106] Don't put vim configuration in source files.
116
+# [H203] Use assertIs(Not)None to check for None.
117
+# [H904] Use ',' instead of '%': string interpolation should be delayed to be handled by the logging code,
118
+#        rather than being done at the point of the logging call.
119
+enable-extensions = H106,H203,H904
115 120
 exclude = .git,.tox,.testrepository,.venv,build,cover,dist,doc,*egg,api-ref/build,*/source/conf.py
116 121
 
117 122
 [hacking]

Loading…
Cancel
Save