Add a couple of new hacking checks

* D705: timeutils.utcnow() must be used instead of datetime.%s()
* D706: Don't translate debug level logs
* D707: basestring is not Python3-compatible, use six.string_types instead.
* D708: Do not use xrange. Use range, or six.moves.range for large loops.
* D709: LOG.audit is deprecated, please use LOG.info!

Change-Id: I5c20102907dfd5534691641a8de102b22c94ed9a
commit bb0be47fe3 (parent e674ec8e6d)
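For quick reference, the snippet below is a small, hypothetical module (not part of this commit) showing the compliant form of each pattern the new checks enforce; the oslo_utils import path and the function name are assumptions for illustration only.

    # compliant_example.py -- hypothetical, illustrative module
    import six
    from oslo_utils import timeutils


    def process(log, values):
        # D705: use timeutils.utcnow() rather than datetime.utcnow()/now()
        started_at = timeutils.utcnow()

        # D707: use six.string_types rather than basestring
        if isinstance(values, six.string_types):
            values = [values]

        # D708: use range (or six.moves.range for very large loops), not xrange
        for i in range(len(values)):
            # D709: LOG.audit is deprecated; emit an info-level log instead
            log.info("processed value %d at %s", i, started_at)

        # D706: debug-level logs are not wrapped in _() for translation
        log.debug("finished processing %d values" % len(values))
        return started_at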
@@ -50,7 +50,7 @@ if __name__ == '__main__':
 
     msg = "Creating %s records" % cfg.CONF.records
     LOG.info(msg)
-    for i in xrange(0, cfg.CONF.records):
+    for i in range(0, cfg.CONF.records):
        name = '%s.%s' % (str(uuid.uuid4()), domain.name)
        record = {"name": name, "type": "A", "data": "10.0.0.1"}
        client.records.create(domain, record)
@@ -24,6 +24,7 @@ import string
 import random
 import time
 
+import six
 from eventlet import tpool
 from dns import zone as dnszone
 from dns import exception as dnsexception
@@ -1969,7 +1970,7 @@ class Service(service.RPCService, service.Service):
        if 'ptrdname' in values.obj_what_changed() and\
                values['ptrdname'] is None:
            self._unset_floatingip_reverse(context, region, floatingip_id)
-        elif isinstance(values['ptrdname'], basestring):
+        elif isinstance(values['ptrdname'], six.string_types):
            return self._set_floatingip_reverse(
                context, region, floatingip_id, values)
 
@@ -21,6 +21,7 @@ import dns
 import dns.exception
 import dns.zone
 import eventlet
+import six
 from dns import rdatatype
 from oslo_log import log as logging
 from oslo_config import cfg
@@ -289,7 +290,7 @@ def expand_servers(servers):
    """
    data = []
    for srv in servers:
-        if isinstance(srv, basestring):
+        if isinstance(srv, six.string_types):
            host, port = utils.split_host_port(srv, 53)
            srv = {"ip": host, "port": port}
        data.append(srv)
@@ -13,6 +13,7 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
+import six
 
 
 class Base(Exception):
@@ -27,7 +28,7 @@ class Base(Exception):
 
        super(Base, self).__init__(*args, **kwargs)
 
-        if len(args) > 0 and isinstance(args[0], basestring):
+        if len(args) > 0 and isinstance(args[0], six.string_types):
            self.error_message = args[0]
 
 
@@ -17,6 +17,17 @@ import re
 import pep8
 
 
+# D701: Default paramater value is a mutable type
+# D702: Log messages require translation
+# D703: Found use of _() without explicit import of _!
+# D704: Found import of %s. This oslo library has been graduated!
+# D705: timeutils.utcnow() must be used instead of datetime.%s()
+# D706: Don't translate debug level logs
+# D707: basestring is not Python3-compatible, use six.string_types instead.
+# D708: Do not use xrange. Use range, or six.moves.range for large loops.
+# D709: LOG.audit is deprecated, please use LOG.info!
+
+
 UNDERSCORE_IMPORT_FILES = []
 
 
@@ -56,6 +67,20 @@ def validate_log_translations(logical_line, physical_line, filename):
        yield (0, msg)
 
 
+def no_translate_debug_logs(logical_line, filename):
+    """Check for 'LOG.debug(_('
+    As per our translation policy,
+    https://wiki.openstack.org/wiki/LoggingStandards#Log_Translation
+    we shouldn't translate debug level logs.
+    * This check assumes that 'LOG' is a logger.
+    * Use filename so we can start enforcing this in specific folders instead
+      of needing to do so all at once.
+    N319
+    """
+    if logical_line.startswith("LOG.debug(_("):
+        yield(0, "D706: Don't translate debug level logs")
+
+
 def check_explicit_underscore_import(logical_line, filename):
     """Check for explicit import of the _ function
 
@@ -95,8 +120,51 @@ def no_import_graduated_oslo_libraries(logical_line, filename):
              "graduated!" % matches.group(1))
 
 
+def use_timeutils_utcnow(logical_line, filename):
+    # tools are OK to use the standard datetime module
+    if "/tools/" in filename:
+        return
+
+    msg = "D705: timeutils.utcnow() must be used instead of datetime.%s()"
+
+    datetime_funcs = ['now', 'utcnow']
+    for f in datetime_funcs:
+        pos = logical_line.find('datetime.%s' % f)
+        if pos != -1:
+            yield (pos, msg % f)
+
+
+def check_no_basestring(logical_line):
+    if re.search(r"\bbasestring\b", logical_line):
+        msg = ("D707: basestring is not Python3-compatible, use "
+               "six.string_types instead.")
+        yield(0, msg)
+
+
+def check_python3_xrange(logical_line):
+    if re.search(r"\bxrange\s*\(", logical_line):
+        yield(0, "D708: Do not use xrange. Use range, or six.moves.range for "
+              "large loops.")
+
+
+def check_no_log_audit(logical_line):
+    """Ensure that we are not using LOG.audit messages
+    Plans are in place going forward as discussed in the following
+    spec (https://review.openstack.org/#/c/132552/) to take out
+    LOG.audit messages. Given that audit was a concept invented
+    for OpenStack we can enforce not using it.
+    """
+    if "LOG.audit(" in logical_line:
+        yield(0, "D709: LOG.audit is deprecated, please use LOG.info!")
+
+
 def factory(register):
     register(mutable_default_arguments)
     register(validate_log_translations)
+    register(no_translate_debug_logs)
     register(check_explicit_underscore_import)
     register(no_import_graduated_oslo_libraries)
+    register(use_timeutils_utcnow)
+    register(check_no_basestring)
+    register(check_python3_xrange)
+    register(check_no_log_audit)
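Because each check above is a plain generator that yields (offset, message) tuples, the same contract pep8/flake8 logical-line checks follow, they can be exercised by hand. A rough sketch, assuming the checks are importable as designate.hacking.checks (the import path is an assumption; adjust it to wherever the project keeps its hacking checks):

    from designate.hacking import checks

    # D705: the offset points at the datetime call inside the line.
    print(list(checks.use_timeutils_utcnow("now = datetime.utcnow()",
                                           "designate/api/example.py")))

    # D707, D708 and D709 only need the logical line.
    print(list(checks.check_no_basestring("isinstance(x, basestring)")))
    print(list(checks.check_python3_xrange("for i in xrange(10):")))
    print(list(checks.check_no_log_audit("LOG.audit('zone created')")))

Each call either yields nothing (the line is clean) or a single (position, "Dxxx: ...") tuple that flake8 would report at that column.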
@@ -22,7 +22,8 @@ from designate.network_api.base import NetworkAPI
 
 LOG = logging.getLogger(__name__)
 
-POOL = dict([(str(uuid.uuid4()), '192.168.2.%s' % i) for i in xrange(0, 254)])
+POOL = dict([(str(uuid.uuid4()), '192.168.2.%s' % i) for i in
+             range(0, 254)])
 ALLOCATIONS = {}
 
 
@@ -104,7 +104,7 @@ class RPCService(object):
    def __init__(self, *args, **kwargs):
        super(RPCService, self).__init__(*args, **kwargs)
 
-        LOG.debug(_("Creating RPC Server on topic '%s'") % self._rpc_topic)
+        LOG.debug("Creating RPC Server on topic '%s'" % self._rpc_topic)
        self._rpc_server = rpc.get_server(
            messaging.Target(topic=self._rpc_topic, server=self._host),
            self._rpc_endpoints)
@@ -120,7 +120,7 @@ class RPCService(object):
    def start(self):
        super(RPCService, self).start()
 
-        LOG.debug(_("Starting RPC server on topic '%s'") % self._rpc_topic)
+        LOG.debug("Starting RPC server on topic '%s'" % self._rpc_topic)
        self._rpc_server.start()
 
        # TODO(kiall): This probably belongs somewhere else, maybe the base
@@ -103,25 +103,37 @@ class SQLAlchemy(object):
            column = getattr(table.c, name)
 
            # Wildcard value: '%'
-            if isinstance(value, basestring) and '%' in value:
+            if isinstance(value, six.string_types) and '%' in value:
                query = query.where(column.like(value))
-            elif isinstance(value, basestring) and value.startswith('!'):
+
+            elif (isinstance(value, six.string_types) and
+                    value.startswith('!')):
                queryval = value[1:]
                query = query.where(column != queryval)
-            elif isinstance(value, basestring) and value.startswith('<='):
+
+            elif (isinstance(value, six.string_types) and
+                    value.startswith('<=')):
                queryval = value[2:]
                query = query.where(column <= queryval)
-            elif isinstance(value, basestring) and value.startswith('<'):
+
+            elif (isinstance(value, six.string_types) and
+                    value.startswith('<')):
                queryval = value[1:]
                query = query.where(column < queryval)
-            elif isinstance(value, basestring) and value.startswith('>='):
+
+            elif (isinstance(value, six.string_types) and
+                    value.startswith('>=')):
                queryval = value[2:]
                query = query.where(column >= queryval)
-            elif isinstance(value, basestring) and value.startswith('>'):
+
+            elif (isinstance(value, six.string_types) and
+                    value.startswith('>')):
                queryval = value[1:]
                query = query.where(column > queryval)
+
            elif isinstance(value, list):
                query = query.where(column.in_(value))
+
            else:
                query = query.where(column == value)
 
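The hunk above also documents the small criteria convention this storage layer understands for string values: '%' anywhere means a LIKE match, a leading '!' negates, and '<=', '<', '>=', '>' prefixes select comparisons, while lists become IN clauses. A standalone sketch of the same idea (hypothetical table and column names, only a few operators shown), written against plain SQLAlchemy Core:

    import six
    from sqlalchemy import Column, Integer, MetaData, String, Table

    metadata = MetaData()
    records = Table('records', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('name', String(255)),
                    Column('ttl', Integer))


    def apply_criteria(query, table, criteria):
        # Mirrors the convention in the hunk above; '<', '<=' and '>' would
        # be handled analogously to '>='.
        for name, value in criteria.items():
            column = getattr(table.c, name)
            if isinstance(value, six.string_types) and '%' in value:
                query = query.where(column.like(value))    # wildcard match
            elif isinstance(value, six.string_types) and value.startswith('!'):
                query = query.where(column != value[1:])   # negation
            elif isinstance(value, six.string_types) and value.startswith('>='):
                query = query.where(column >= value[2:])   # comparison
            elif isinstance(value, list):
                query = query.where(column.in_(value))     # IN clause
            else:
                query = query.where(column == value)       # exact match
        return query


    query = apply_criteria(records.select(), records,
                           {'name': '%.example.org.', 'ttl': '>=300'})
    print(query)  # a SELECT with a LIKE and a >= condition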
@@ -138,7 +138,7 @@ class AdminApiTestCase(ApiTestCase):
 
        x = 0
        length = len(data)
-        for i in xrange(0, length):
+        for i in range(0, length):
            assert data[i]['id'] == response[x]['id']
 
            x += 1
@@ -141,7 +141,7 @@ class ApiV2TestCase(ApiTestCase):
 
        x = 0
        length = len(data)
-        for i in xrange(0, length):
+        for i in range(0, length):
            assert data[i]['id'] == response[x]['id']
 
            x += 1
@@ -40,7 +40,7 @@ class ApiV2BlacklistsTest(ApiV2TestCase):
        self.assertEqual(0, len(response.json['blacklists']))
 
        data = [self.create_blacklist(
-            pattern='x-%s.org.' % i) for i in xrange(0, 10)]
+            pattern='x-%s.org.' % i) for i in range(0, 10)]
 
        self._assert_paging(data, '/blacklists', key='blacklists')
 
@@ -125,7 +125,8 @@ class ApiV2PoolsTest(ApiV2TestCase):
                          pool_id)
 
        # Add the default pool into the list
-        data = [self.create_pool(name='x-%s' % i) for i in xrange(0, 10)]
+        data = [self.create_pool(name='x-%s' % i) for i in
+                range(0, 10)]
        data.insert(0, default_pool)
 
        # Test the paging of the list
@@ -224,7 +224,7 @@ class ApiV2RecordSetsTest(ApiV2TestCase):
                                       'type': 'NS'})
        data = [self.create_recordset(self.domain,
                name='x-%s.%s' % (i, self.domain['name']))
-                for i in xrange(0, 10)]
+                for i in range(0, 10)]
        data.insert(0, ns)
        data.insert(0, soa)
 
@@ -718,7 +718,7 @@ class ApiV2RecordSetsTest(ApiV2TestCase):
                                       'type': 'SOA'})
        data = [self.create_recordset(secondary,
                name='x-%s.%s' % (i, secondary['name']))
-                for i in xrange(0, 10)]
+                for i in range(0, 10)]
        data.insert(0, soa)
 
        self._assert_paging(data, url, key='recordsets')
@@ -319,7 +319,7 @@ class StorageTestCase(object):
    def test_find_tsigkeys_paging(self):
        # Create 10 TSIG Keys
        created = [self.create_tsigkey(name='tsig-%s' % i)
-                   for i in xrange(10)]
+                   for i in range(10)]
 
        # Ensure we can page through the results.
        self._ensure_paging(created, self.storage.find_tsigkeys)
@@ -543,7 +543,7 @@ class StorageTestCase(object):
    def test_find_domains_paging(self):
        # Create 10 Domains
        created = [self.create_domain(name='example-%d.org.' % i)
-                   for i in xrange(10)]
+                   for i in range(10)]
 
        # Ensure we can page through the results.
        self._ensure_paging(created, self.storage.find_domains)
@@ -856,7 +856,7 @@ class StorageTestCase(object):
 
        # Create 10 RecordSets
        created = [self.create_recordset(domain, name='r-%d.example.org.' % i)
-                   for i in xrange(10)]
+                   for i in range(10)]
 
        # Add in the SOA and NS recordsets that are automatically created
        soa = self.storage.find_recordset(self.admin_context,
@@ -1255,7 +1255,7 @@ class StorageTestCase(object):
 
        # Create 10 Records
        created = [self.create_record(domain, recordset, data='192.0.2.%d' % i)
-                   for i in xrange(10)]
+                   for i in range(10)]
 
        # Add in the SOA and NS records that are automatically created
        soa = self.storage.find_recordset(self.admin_context,
@@ -1540,7 +1540,7 @@ class StorageTestCase(object):
    def test_find_tlds_paging(self):
        # Create 10 Tlds
        created = [self.create_tld(name='org%d' % i)
-                   for i in xrange(10)]
+                   for i in range(10)]
 
        # Ensure we can page through the results.
        self._ensure_paging(created, self.storage.find_tlds)
@@ -1694,7 +1694,7 @@ class StorageTestCase(object):
    def test_find_blacklists_paging(self):
        # Create 10 Blacklists
        created = [self.create_blacklist(pattern='^example-%d.org.' % i)
-                   for i in xrange(10)]
+                   for i in range(10)]
 
        # Ensure we can page through the results.
        self._ensure_paging(created, self.storage.find_blacklists)
@@ -1848,7 +1848,7 @@ class StorageTestCase(object):
 
        # Create 10 Pools
        created = [self.create_pool(name='test%d' % i)
-                   for i in xrange(10)]
+                   for i in range(10)]
 
        # Add in the existing pools
 
@@ -2214,7 +2214,7 @@ class StorageTestCase(object):
    def test_find_pool_attributes_paging(self):
        # Create 10 Pool Attributes
        created = [self.create_pool_attribute(value='^ns%d.example.com.' % i)
-                   for i in xrange(10)]
+                   for i in range(10)]
 
        # Ensure we can page through the results.
        self._ensure_paging(created, self.storage.find_pool_attributes)
@@ -2382,7 +2382,7 @@ class StorageTestCase(object):
 
    def test_find_zone_tasks_paging(self):
        # Create 10 ZoneTasks
-        created = [self.create_zone_task() for i in xrange(10)]
+        created = [self.create_zone_task() for i in range(10)]
 
        # Ensure we can page through the results.
        self._ensure_paging(created, self.storage.find_zone_tasks)