Enabled H402 flake8 rule

Added support for flake8 H402: "one line docstring needs punctuation".

Reasons:
- H402 is disabled in tox.ini; enabling it makes the code more
  flake8 compliant.

Changes:
- Updated tox.ini to enable the H402 rule.
- Updated code to fix H402 violations.
- Updated one-word docstrings to be more meaningful.

Change-Id: I5e67b641717c9704ef4b9f6267390561e4840fff
SushilKM 2014-05-26 18:51:52 +00:00
parent 5f019cc8bb
commit 25b2f2c2a8
54 changed files with 133 additions and 132 deletions
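The pattern applied across these 54 files is mechanical: H402 only asks that a one-line docstring end with punctuation. A minimal before/after sketch, using a hypothetical function rather than code from this change set:

    # Illustrative only; service_status() is made up for this sketch.

    # Before: flagged by H402 ("one line docstring needs punctuation")
    def service_status():
        """Return the current service status"""
        pass

    # After: compliant, because the one-line docstring ends with a period
    def service_status():
        """Return the current service status."""
        pass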


@ -41,7 +41,7 @@ commands = {posargs}
[flake8]
show-source = True
ignore = F821,H301,H306,H402,H404
ignore = F821,H301,H306,H404
builtins = _
exclude=.venv,.tox,dist,doc,openstack,*egg,rsdns,tools,etc,build
filename=*.py,trove-*
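With H402 removed from the ignore list above, flake8 now reports every one-line docstring that is missing punctuation, which is what the remaining files in this change fix. If a particular docstring ever had to stay untouched, flake8's generic per-line # noqa marker would be the usual escape hatch (assuming the hacking check honours it, as most do); this commit fixes every occurrence instead. A hypothetical sketch, not taken from this change:

    def legacy_status():
        """status check"""  # noqa
        pass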


@ -246,7 +246,7 @@ def persisted_models():
class DBBackup(DatabaseModelBase):
"""A table for Backup records"""
"""A table for Backup records."""
_data_fields = ['id', 'name', 'description', 'location', 'backup_type',
'size', 'tenant_id', 'state', 'instance_id',
'checksum', 'backup_timestamp', 'deleted', 'created',


@ -26,7 +26,7 @@ from trove.datastore.service import DatastoreController
class API(wsgi.Router):
"""API"""
"""Defines the API routes."""
def __init__(self):
mapper = routes.Mapper()
super(API, self).__init__(mapper)


@ -166,7 +166,7 @@ class Configuration(object):
@staticmethod
def get_configuration_overrides(context, configuration_id):
"""Gets the overrides dict to apply to an instance"""
"""Gets the overrides dictionary to apply to an instance."""
overrides = {}
if configuration_id:
config_items = Configuration.load_items(context,


@ -103,7 +103,7 @@ class DesignateDriver(driver.DnsDriver):
self.dns_client.records.delete(dns_zone.id, matching_record[0].id)
def get_entries_by_content(self, content, dns_zone=None):
"""Retrieves all entries in a dns_zone with a matching content field"""
"""Retrieves all entries in a DNS zone with matching content field."""
records = self._get_records(dns_zone)
return [self.converter.record_to_entry(record, dns_zone)
for record in records if record.data == content]


@ -33,7 +33,7 @@ class DnsDriver(object):
pass
def get_entries_by_content(self, content, dns_zone=None):
"""Retrieves all entries in a dns_zone with a matching content field"""
"""Retrieves all entries in a DNS zone with matching content field."""
pass
def get_entries_by_name(self, name, dns_zone=None):


@ -26,7 +26,7 @@ LOG = logging.getLogger(__name__)
class AccountController(wsgi.Controller):
"""Controller for account functionality"""
"""Controller for account functionality."""
schemas = apischema.account
@admin_context


@ -26,7 +26,7 @@ LOG = logging.getLogger(__name__)
class HostController(InstanceController):
"""Controller for instance functionality"""
"""Controller for instance functionality."""
@admin_context
def index(self, req, tenant_id, detailed=False):


@ -36,7 +36,7 @@ LOG = logging.getLogger(__name__)
class MgmtInstanceController(InstanceController):
"""Controller for instance functionality"""
"""Controller for instance functionality."""
schemas = apischema.mgmt_instance
@classmethod


@ -25,7 +25,7 @@ LOG = logging.getLogger(__name__)
class QuotaController(wsgi.Controller):
"""Controller for quota functionality"""
"""Controller for quota functionality."""
@admin_context
def show(self, req, tenant_id, id):


@ -25,7 +25,7 @@ LOG = logging.getLogger(__name__)
class StorageController(wsgi.Controller):
"""Controller for storage device functionality"""
"""Controller for storage device functionality."""
@admin_context
def index(self, req, tenant_id):


@ -44,7 +44,7 @@ def populate_validated_databases(dbs):
def populate_users(users, initial_databases=None):
"""Create a serializable request containing users"""
"""Create a serializable request containing users."""
users_data = []
unique_identities = set()
for user in users:


@ -34,7 +34,7 @@ LOG = logging.getLogger(__name__)
class RootController(wsgi.Controller):
"""Controller for instance functionality"""
"""Controller for instance functionality."""
def index(self, req, tenant_id, instance_id):
"""Returns True if root is enabled for the given instance;
@ -47,7 +47,7 @@ class RootController(wsgi.Controller):
return wsgi.Result(views.RootEnabledView(is_root_enabled).data(), 200)
def create(self, req, tenant_id, instance_id):
"""Enable the root user for the db instance """
"""Enable the root user for the db instance."""
LOG.info(_("Enabling root for instance '%s'") % instance_id)
LOG.info(_("req : '%s'\n\n") % req)
context = req.environ[wsgi.CONTEXT_KEY]
@ -57,7 +57,7 @@ class RootController(wsgi.Controller):
class UserController(wsgi.Controller):
"""Controller for instance functionality"""
"""Controller for instance functionality."""
schemas = apischema.user
@classmethod
@ -80,7 +80,7 @@ class UserController(wsgi.Controller):
return wsgi.Result(paged.data(), 200)
def create(self, req, body, tenant_id, instance_id):
"""Creates a set of users"""
"""Creates a set of users."""
LOG.info(_("Creating users for instance '%s'") % instance_id)
LOG.info(logging.mask_password(_("req : '%s'\n\n") % req))
LOG.info(logging.mask_password(_("body : '%s'\n\n") % body))
@ -253,7 +253,7 @@ class UserAccessController(wsgi.Controller):
class SchemaController(wsgi.Controller):
"""Controller for instance functionality"""
"""Controller for instance functionality."""
schemas = apischema.dbschema
def index(self, req, tenant_id, instance_id):
@ -268,7 +268,7 @@ class SchemaController(wsgi.Controller):
return wsgi.Result(paged.data(), 200)
def create(self, req, body, tenant_id, instance_id):
"""Creates a set of schemas"""
"""Creates a set of schemas."""
LOG.info(_("Creating schema for instance '%s'") % instance_id)
LOG.info(_("req : '%s'\n\n") % req)
LOG.info(_("body : '%s'\n\n") % body)


@ -228,7 +228,7 @@ class RemoteSecurityGroup(NovaRemoteModelBase):
@classmethod
def create(cls, name, description, context):
"""Creates a new Security Group"""
"""Creates a new Security Group."""
client = trove.common.remote.create_nova_client(context)
try:
sec_group = client.security_groups.create(name=name,


@ -29,7 +29,7 @@ CONF = cfg.CONF
class SecurityGroupController(wsgi.Controller):
"""Controller for security groups functionality"""
"""Controller for security groups functionality."""
def index(self, req, tenant_id):
"""Return all security groups tied to a particular tenant_id."""
@ -61,7 +61,7 @@ class SecurityGroupController(wsgi.Controller):
class SecurityGroupRuleController(wsgi.Controller):
"""Controller for security group rule functionality"""
"""Controller for security group rule functionality."""
def delete(self, req, tenant_id, id):
LOG.debug("Delete Security Group Rule called %s, %s" % (tenant_id, id))


@ -33,7 +33,7 @@ class SecurityGroupView(object):
self.tenant_id = tenant_id
def _build_links(self):
"""Build the links for the secgroup"""
"""Build the links for the secgroup."""
base_url = _base_url(self.request)
href = os.path.join(base_url, self.tenant_id,
"security-groups", str(self.secgroup['id']))


@ -21,7 +21,7 @@ from trove.flavor import views
class FlavorController(wsgi.Controller):
"""Controller for flavor functionality"""
"""Controller for flavor functionality."""
def show(self, req, tenant_id, id):
"""Return a single flavor."""


@ -103,7 +103,7 @@ class API(proxy.RpcProxy):
rd_rpc.delete_queue(self.context, topic)
def _get_routing_key(self):
"""Create the routing key based on the container id"""
"""Create the routing key based on the container id."""
return "guestagent.%s" % self.id
def _check_for_hearbeat(self):
@ -161,13 +161,13 @@ class API(proxy.RpcProxy):
database=database)
def list_users(self, limit=None, marker=None, include_marker=False):
"""Make an asynchronous call to list database users"""
"""Make an asynchronous call to list database users."""
LOG.debug("Listing Users for Instance %s", self.id)
return self._call("list_users", AGENT_LOW_TIMEOUT, limit=limit,
marker=marker, include_marker=include_marker)
def delete_user(self, user):
"""Make an asynchronous call to delete an existing database user"""
"""Make an asynchronous call to delete an existing database user."""
LOG.debug("Deleting user %(user)s for Instance %(instance_id)s" %
{'user': user, 'instance_id': self.id})
self._cast("delete_user", user=user)
@ -180,7 +180,7 @@ class API(proxy.RpcProxy):
self._cast("create_database", databases=databases)
def list_databases(self, limit=None, marker=None, include_marker=False):
"""Make an asynchronous call to list databases"""
"""Make an asynchronous call to list databases."""
LOG.debug("Listing databases for Instance %s", self.id)
return self._call("list_databases", AGENT_LOW_TIMEOUT, limit=limit,
marker=marker, include_marker=include_marker)
@ -268,12 +268,12 @@ class API(proxy.RpcProxy):
do_not_start_on_reboot=do_not_start_on_reboot)
def upgrade(self):
"""Make an asynchronous call to self upgrade the guest agent"""
"""Make an asynchronous call to self upgrade the guest agent."""
LOG.debug("Sending an upgrade call to nova-guest")
self._cast_with_consumer("upgrade")
def get_volume_info(self):
"""Make a synchronous call to get volume info for the container"""
"""Make a synchronous call to get volume info for the container."""
LOG.debug("Check Volume Info on Instance %s", self.id)
# self._check_for_hearbeat()
return self._call("get_filesystem_stats", AGENT_LOW_TIMEOUT,
@ -284,34 +284,35 @@ class API(proxy.RpcProxy):
self._call("update_guest", AGENT_HIGH_TIMEOUT)
def create_backup(self, backup_info):
"""Make async call to create a full backup of this instance"""
"""Make async call to create a full backup of this instance."""
LOG.debug("Create Backup %(backup_id)s "
"for Instance %(instance_id)s" %
{'backup_id': backup_info['id'], 'instance_id': self.id})
self._cast("create_backup", backup_info=backup_info)
def mount_volume(self, device_path=None, mount_point=None):
"""Mount the volume"""
"""Mount the volume."""
LOG.debug("Mount volume %(mount)s on instance %(id)s" % {
'mount': mount_point, 'id': self.id})
self._call("mount_volume", AGENT_LOW_TIMEOUT,
device_path=device_path, mount_point=mount_point)
def unmount_volume(self, device_path=None, mount_point=None):
"""Unmount the volume"""
"""Unmount the volume."""
LOG.debug("Unmount volume %(device)s on instance %(id)s" % {
'device': device_path, 'id': self.id})
self._call("unmount_volume", AGENT_LOW_TIMEOUT,
device_path=device_path, mount_point=mount_point)
def resize_fs(self, device_path=None, mount_point=None):
"""Resize the filesystem"""
"""Resize the filesystem."""
LOG.debug("Resize device %(device)s on instance %(id)s" % {
'device': device_path, 'id': self.id})
self._call("resize_fs", AGENT_HIGH_TIMEOUT, device_path=device_path,
mount_point=mount_point)
def update_overrides(self, overrides, remove=False):
"""Update the overrides."""
LOG.debug("Updating overrides on Instance %s", self.id)
LOG.debug("Updating overrides values %s" % overrides)
self._cast("update_overrides", overrides=overrides, remove=remove)


@ -37,14 +37,14 @@ class Manager(periodic_task.PeriodicTasks):
@periodic_task.periodic_task(ticks_between_runs=3)
def update_status(self, context):
"""Update the status of the Cassandra service"""
"""Update the status of the Cassandra service."""
self.appStatus.update()
def restart(self, context):
self.app.restart()
def get_filesystem_stats(self, context, fs_path):
"""Gets the filesystem stats for the path given. """
"""Gets the filesystem stats for the path given."""
mount_point = CONF.get(
'mysql' if not MANAGER else MANAGER).mount_point
return dbaas.get_filesystem_volume_stats(mount_point)


@ -36,12 +36,12 @@ class CassandraApp(object):
"""Prepares DBaaS on a Guest container."""
def __init__(self, status):
"""By default login with root no password for initial setup. """
"""By default login with root no password for initial setup."""
self.state_change_wait_time = CONF.state_change_wait_time
self.status = status
def install_if_needed(self, packages):
"""Prepare the guest machine with a cassandra server installation"""
"""Prepare the guest machine with a cassandra server installation."""
LOG.info(_("Preparing Guest as Cassandra Server"))
if not packager.pkg_is_installed(packages):
self._install_db(packages)
@ -128,7 +128,7 @@ class CassandraApp(object):
LOG.info(_('Overriding old config'))
def read_conf(self):
"""Returns cassandra.yaml in dict structure"""
"""Returns cassandra.yaml in dict structure."""
LOG.info(_("Opening cassandra.yaml"))
with open(system.CASSANDRA_CONF, 'r') as config:
@ -137,7 +137,7 @@ class CassandraApp(object):
return yamled
def update_config_with_single(self, key, value):
"""Updates single key:value in cassandra.yaml"""
"""Updates single key:value in 'cassandra.yaml'."""
yamled = self.read_conf()
yamled.update({key: value})
@ -148,7 +148,7 @@ class CassandraApp(object):
self.write_config(dump)
def update_conf_with_group(self, group):
"""Updates group of key:value in cassandra.yaml"""
"""Updates group of key:value in 'cassandra.yaml'."""
yamled = self.read_conf()
for key, value in group.iteritems():


@ -98,7 +98,7 @@ class Manager(periodic_task.PeriodicTasks):
self.app.stop_db(do_not_start_on_reboot=do_not_start_on_reboot)
def get_filesystem_stats(self, context, fs_path):
"""Gets the filesystem stats for the path given. """
"""Gets the filesystem stats for the path given."""
mount_point = CONF.get(
'mysql' if not MANAGER else MANAGER).mount_point
return dbaas.get_filesystem_volume_stats(mount_point)


@ -39,7 +39,7 @@ class Manager(periodic_task.PeriodicTasks):
@periodic_task.periodic_task(ticks_between_runs=3)
def update_status(self, context):
"""Update the status of the MongoDB service"""
"""Update the status of the MongoDB service."""
self.status.update()
def prepare(self, context, packages, databases, memory_mb, users,
@ -89,7 +89,7 @@ class Manager(periodic_task.PeriodicTasks):
self.app.reset_configuration(configuration)
def get_filesystem_stats(self, context, fs_path):
"""Gets the filesystem stats for the path given """
"""Gets the filesystem stats for the path given."""
return dbaas.get_filesystem_volume_stats(system.MONGODB_MOUNT_POINT)
def change_passwords(self, context, users):


@ -38,7 +38,7 @@ class MongoDBApp(object):
self.status = status
def install_if_needed(self, packages):
"""Prepare the guest machine with a MongoDB installation"""
"""Prepare the guest machine with a MongoDB installation."""
LOG.info(_("Preparing Guest as MongoDB"))
if not system.PACKAGER.pkg_is_installed(packages):
LOG.debug("Installing packages: %s" % str(packages))


@ -39,7 +39,7 @@ class Manager(periodic_task.PeriodicTasks):
@periodic_task.periodic_task(ticks_between_runs=3)
def update_status(self, context):
"""Update the status of the MySQL service"""
"""Update the status of the MySQL service."""
MySqlAppStatus.get().update()
def change_passwords(self, context, users):
@ -164,7 +164,7 @@ class Manager(periodic_task.PeriodicTasks):
app.stop_db(do_not_start_on_reboot=do_not_start_on_reboot)
def get_filesystem_stats(self, context, fs_path):
"""Gets the filesystem stats for the path given. """
"""Gets the filesystem stats for the path given."""
mount_point = CONF.get(
'mysql' if not MANAGER else MANAGER).mount_point
return dbaas.get_filesystem_volume_stats(mount_point)


@ -124,7 +124,7 @@ class Manager(periodic_task.PeriodicTasks):
app.stop_db(do_not_start_on_reboot=do_not_start_on_reboot)
def get_filesystem_stats(self, context, fs_path):
"""Gets the filesystem stats for the path given. """
"""Gets the filesystem stats for the path given."""
mount_point = CONF.get(
'mysql' if not MANAGER else MANAGER).mount_point
return dbaas.get_filesystem_volume_stats(mount_point)


@ -30,7 +30,7 @@ class Base(object):
class MySQLDatabase(Base):
"""Represents a Database and its properties"""
"""Represents a Database and its properties."""
_ignore_dbs = CONF.ignore_dbs
@ -287,7 +287,7 @@ class MySQLDatabase(Base):
@property
def collate(self):
"""Get the appropriate collate value"""
"""Get the appropriate collate value."""
if not self._collate and not self._character_set:
return self.__collation__
elif not self._collate:
@ -297,7 +297,7 @@ class MySQLDatabase(Base):
@collate.setter
def collate(self, value):
"""Validate the collation and set it"""
"""Validate the collation and set it."""
if not value:
pass
elif self._character_set:
@ -313,7 +313,7 @@ class MySQLDatabase(Base):
@property
def character_set(self):
"""Get the appropriate character set value"""
"""Get the appropriate character set value."""
if not self._character_set:
return self.__charset__
else:
@ -321,7 +321,7 @@ class MySQLDatabase(Base):
@character_set.setter
def character_set(self, value):
"""Validate the character set and set it"""
"""Validate the character set and set it."""
if not value:
pass
elif not value in self.charset:
@ -346,7 +346,7 @@ class ValidatedMySQLDatabase(MySQLDatabase):
class MySQLUser(Base):
"""Represents a MySQL User and its associated properties"""
"""Represents a MySQL User and its associated properties."""
not_supported_chars = re.compile("^\s|\s$|'|\"|;|`|,|/|\\\\")
_ignore_users = CONF.ignore_users


@ -27,7 +27,7 @@ class Controller(wsgi.Controller):
class API(wsgi.Router):
"""API"""
"""Defines the API routes."""
def __init__(self):
mapper = routes.Mapper()
super(API, self).__init__(mapper)


@ -31,11 +31,11 @@ class BackupError(Exception):
class UnknownBackupType(Exception):
"""Unknown backup type"""
"""Unknown backup type."""
class BackupRunner(Strategy):
"""Base class for Backup Strategy implementations """
"""Base class for Backup Strategy implementations."""
__strategy_type__ = 'backup_runner'
__strategy_ns__ = 'trove.guestagent.strategies.backup'
@ -65,7 +65,7 @@ class BackupRunner(Strategy):
self.pid = self.process.pid
def __enter__(self):
"""Start up the process"""
"""Start up the process."""
self._run_pre_backup()
self.run()
self._run_post_backup()
@ -99,7 +99,7 @@ class BackupRunner(Strategy):
@property
def filename(self):
"""Subclasses may overwrite this to declare a format (.tar)"""
"""Subclasses may overwrite this to declare a format (.tar)."""
return self.base_filename
@property


@ -26,7 +26,7 @@ LOG = logging.getLogger(__name__)
class MySQLDump(base.BackupRunner):
"""Implementation of Backup Strategy for MySQLDump """
"""Implementation of Backup Strategy for MySQLDump."""
__strategy_name__ = 'mysqldump'
@property
@ -44,7 +44,7 @@ class MySQLDump(base.BackupRunner):
class InnoBackupEx(base.BackupRunner):
"""Implementation of Backup Strategy for InnoBackupEx """
"""Implementation of Backup Strategy for InnoBackupEx."""
__strategy_name__ = 'innobackupex'
@property
@ -57,7 +57,7 @@ class InnoBackupEx(base.BackupRunner):
return cmd + self.zip_cmd + self.encrypt_cmd
def check_process(self):
"""Check the output from innobackupex for 'completed OK!'"""
"""Check the output from innobackupex for 'completed OK!'."""
LOG.debug('Checking innobackupex process output')
with open('/tmp/innobackupex.log', 'r') as backup_log:
output = backup_log.read()


@ -43,7 +43,7 @@ class RestoreError(Exception):
class RestoreRunner(Strategy):
"""Base class for Restore Strategy implementations """
"""Base class for Restore Strategy implementations."""
"""Restore a database from a previous backup."""
__strategy_type__ = 'restore_runner'
@ -72,11 +72,11 @@ class RestoreRunner(Strategy):
super(RestoreRunner, self).__init__()
def pre_restore(self):
"""Hook that is called before the restore command"""
"""Hook that is called before the restore command."""
pass
def post_restore(self):
"""Hook that is called after the restore command"""
"""Hook that is called after the restore command."""
pass
def restore(self):


@ -30,7 +30,7 @@ LOG = logging.getLogger(__name__)
class MySQLRestoreMixin(object):
"""Common utils for restoring MySQL databases"""
"""Common utils for restoring MySQL databases."""
RESET_ROOT_RETRY_TIMEOUT = 100
RESET_ROOT_SLEEP_INTERVAL = 10
RESET_ROOT_MYSQL_COMMAND = ("SET PASSWORD FOR"
@ -97,13 +97,13 @@ class MySQLRestoreMixin(object):
class MySQLDump(base.RestoreRunner, MySQLRestoreMixin):
"""Implementation of Restore Strategy for MySQLDump"""
"""Implementation of Restore Strategy for MySQLDump."""
__strategy_name__ = 'mysqldump'
base_restore_cmd = 'sudo mysql'
class InnoBackupEx(base.RestoreRunner, MySQLRestoreMixin):
"""Implementation of Restore Strategy for InnoBackupEx"""
"""Implementation of Restore Strategy for InnoBackupEx."""
__strategy_name__ = 'innobackupex'
base_restore_cmd = 'sudo xbstream -x -C %(restore_location)s'
base_prepare_cmd = ('sudo innobackupex --apply-log %(restore_location)s'


@ -19,7 +19,7 @@ from trove.guestagent.strategy import Strategy
class Storage(Strategy):
"""Base class for Storage Strategy implementation """
"""Base class for Storage Strategy implementation."""
__strategy_type__ = 'storage'
__strategy_ns__ = 'trove.guestagent.strategies.storage'
@ -29,11 +29,11 @@ class Storage(Strategy):
@abc.abstractmethod
def save(self, filename, stream):
"""Persist information from the stream """
"""Persist information from the stream."""
@abc.abstractmethod
def load(self, location, backup_checksum):
"""Load a stream from a persisted storage location """
"""Load a stream from a persisted storage location."""
@abc.abstractmethod
def load_metadata(self, location, backup_checksum):


@ -89,7 +89,7 @@ class StreamReader(object):
class SwiftStorage(base.Storage):
"""Implementation of Storage Strategy for Swift """
"""Implementation of Storage Strategy for Swift."""
__strategy_name__ = 'swift'
def __init__(self, *args, **kwargs):
@ -183,7 +183,7 @@ class SwiftStorage(base.Storage):
return True
def load(self, location, backup_checksum):
"""Restore a backup from the input stream to the restore_location"""
"""Restore a backup from the input stream to the restore_location."""
storage_url, container, filename = self._explodeLocation(location)
headers, info = self.connection.get_object(container, filename,
@ -195,13 +195,13 @@ class SwiftStorage(base.Storage):
return info
def _get_attr(self, original):
"""Get a friendly name from an object header key"""
"""Get a friendly name from an object header key."""
key = original.replace('-', '_')
key = key.replace('x_object_meta_', '')
return key
def _set_attr(self, original):
"""Return a swift friendly header key"""
"""Return a swift friendly header key."""
key = original.replace('_', '-')
return 'X-Object-Meta-%s' % key


@ -36,7 +36,7 @@ class VolumeDevice(object):
self.device_path = device_path
def migrate_data(self, mysql_base):
"""Synchronize the data from the mysql directory to the new volume """
"""Synchronize the data from the mysql directory to the new volume."""
self.mount(TMP_MOUNT_POINT, write_to_fstab=False)
if not mysql_base[-1] == '/':
mysql_base = "%s/" % mysql_base
@ -102,7 +102,7 @@ class VolumeDevice(object):
mount_point.write_to_fstab()
def resize_fs(self, mount_point):
"""Resize the filesystem on the specified device"""
"""Resize the filesystem on the specified device."""
self._check_device_exists()
try:
# check if the device is mounted at mount_point before e2fsck


@ -37,7 +37,7 @@ LOG = logging.getLogger(__name__)
class InstanceController(wsgi.Controller):
"""Controller for instance functionality"""
"""Controller for instance functionality."""
schemas = apischema.instance.copy()
if not CONF.trove_volume_support:
# see instance.models.create for further validation around this


@ -320,7 +320,7 @@ QUOTAS.register_resources(resources)
def run_with_quotas(tenant_id, deltas, f):
"""Quota wrapper """
"""Quota wrapper."""
reservations = QUOTAS.reserve(tenant_id, **deltas)
result = None


@ -55,7 +55,7 @@ class CreateBackups(object):
@test
def test_backup_create_instance_invalid(self):
"""test create backup with unknown instance"""
"""Test create backup with unknown instance."""
invalid_inst_id = 'invalid-inst-id'
try:
instance_info.dbaas.backups.create(BACKUP_NAME, invalid_inst_id,
@ -73,13 +73,13 @@ class CreateBackups(object):
@test
def test_backup_create_instance_not_found(self):
"""test create backup with unknown instance"""
"""Test create backup with unknown instance."""
assert_raises(exceptions.NotFound, instance_info.dbaas.backups.create,
BACKUP_NAME, generate_uuid(), BACKUP_DESC)
@test
def test_backup_create_instance(self):
"""test create backup for a given instance"""
"""Test create backup for a given instance."""
# Necessary to test that the count increases.
global backup_count_prior_to_create
backup_count_prior_to_create = len(instance_info.dbaas.backups.list())
@ -106,20 +106,20 @@ class AfterBackupCreation(object):
@test
def test_instance_action_right_after_backup_create(self):
"""test any instance action while backup is running"""
"""Test any instance action while backup is running."""
assert_unprocessable(instance_info.dbaas.instances.resize_instance,
instance_info.id, 1)
@test
def test_backup_create_another_backup_running(self):
"""test create backup when another backup is running"""
"""Test create backup when another backup is running."""
assert_unprocessable(instance_info.dbaas.backups.create,
'backup_test2', instance_info.id,
'test description2')
@test
def test_backup_delete_still_running(self):
"""test delete backup when it is running"""
"""Test delete backup when it is running."""
result = instance_info.dbaas.backups.list()
backup = result[0]
assert_unprocessable(instance_info.dbaas.backups.delete, backup.id)
@ -153,7 +153,7 @@ class ListBackups(object):
@test
def test_backup_list(self):
"""test list backups"""
"""Test list backups."""
result = instance_info.dbaas.backups.list()
assert_equal(backup_count_prior_to_create + 1, len(result))
backup = result[0]
@ -165,7 +165,7 @@ class ListBackups(object):
@test
def test_backup_list_for_instance(self):
"""test backup list for instance"""
"""Test backup list for instance."""
result = instance_info.dbaas.instances.backups(instance_info.id)
assert_equal(backup_count_for_instance_prior_to_create + 1,
len(result))
@ -178,7 +178,7 @@ class ListBackups(object):
@test
def test_backup_get(self):
"""test get backup"""
"""Test get backup."""
backup = instance_info.dbaas.backups.get(backup_info.id)
assert_equal(backup_info.id, backup.id)
assert_equal(backup_info.name, backup.name)
@ -329,7 +329,7 @@ class DeleteRestoreInstance(object):
@classmethod
def _delete(cls, instance_id):
"""test delete restored instance"""
"""Test delete restored instance."""
instance_info.dbaas.instances.delete(instance_id)
assert_equal(202, instance_info.dbaas.last_http_code)
@ -365,13 +365,13 @@ class DeleteBackups(object):
@test
def test_backup_delete_not_found(self):
"""test delete unknown backup"""
"""Test delete unknown backup."""
assert_raises(exceptions.NotFound, instance_info.dbaas.backups.delete,
'nonexistent_backup')
@test
def test_backup_delete_other(self):
"""Test another user cannot delete backup"""
"""Test another user cannot delete backup."""
# Test to make sure that user in other tenant is not able
# to DELETE this backup
reqs = Requirements(is_admin=False)
@ -384,7 +384,7 @@ class DeleteBackups(object):
@test(runs_after=[test_backup_delete_other])
def test_backup_delete(self):
"""test backup deletion"""
"""Test backup deletion."""
instance_info.dbaas.backups.delete(backup_info.id)
assert_equal(202, instance_info.dbaas.last_http_code)
@ -399,7 +399,7 @@ class DeleteBackups(object):
@test(runs_after=[test_backup_delete])
def test_incremental_deleted(self):
"""test backup children are deleted"""
"""Test backup children are deleted."""
if incremental_info is None:
raise SkipTest("Incremental Backup not created")
assert_raises(exceptions.NotFound, instance_info.dbaas.backups.get,


@ -138,7 +138,7 @@ class CreateConfigurations(object):
@test
def test_expected_configurations_parameters(self):
"""test get expected configurations parameters"""
"""Test get expected configurations parameters."""
expected_attrs = ["configuration-parameters"]
instance_info.dbaas.configuration_parameters.parameters(
instance_info.dbaas_datastore,
@ -182,21 +182,21 @@ class CreateConfigurations(object):
@test
def test_configurations_create_invalid_values(self):
"""test create configurations with invalid values"""
"""Test create configurations with invalid values."""
values = '{"this_is_invalid": 123}'
assert_unprocessable(instance_info.dbaas.configurations.create,
CONFIG_NAME, values, CONFIG_DESC)
@test
def test_configurations_create_invalid_value_type(self):
"""test create configuration with invalild value type"""
"""Test create configuration with invalild value type."""
values = '{"key_buffer_size": "this is a string not int"}'
assert_unprocessable(instance_info.dbaas.configurations.create,
CONFIG_NAME, values, CONFIG_DESC)
@test
def test_configurations_create_value_out_of_bounds(self):
"""test create configuration with value out of bounds"""
"""Test create configuration with value out of bounds."""
values = '{"connect_timeout": 1000000}'
assert_unprocessable(instance_info.dbaas.configurations.create,
CONFIG_NAME, values, CONFIG_DESC)


@ -26,7 +26,7 @@ from trove.tests.api.versions import Versions
@test(groups=['dbaas.api.headers'])
def must_work_with_blank_accept_headers():
"""Test to make sure that trove works without the headers"""
"""Test to make sure that trove works without the headers."""
versions = Versions()
versions.setUp()
client = versions.client


@ -909,7 +909,7 @@ class TestAfterInstanceCreatedGuestData(object):
@test(depends_on_classes=[WaitForGuestInstallationToFinish],
groups=[GROUP, GROUP_START, GROUP_START_SIMPLE, "dbaas.listing"])
class TestInstanceListing(object):
"""Test the listing of the instance information """
"""Test the listing of the instance information."""
@before_class
def setUp(self):
@ -1062,7 +1062,7 @@ class TestCreateNotification(object):
@test(depends_on_groups=['dbaas.api.instances.actions'],
groups=[GROUP, tests.INSTANCES, "dbaas.diagnostics"])
class CheckDiagnosticsAfterTests(object):
"""Check the diagnostics after running api commands on an instance. """
"""Check the diagnostics after running api commands on an instance."""
@test
def test_check_diagnostics_on_instance_after_tests(self):
diagnostics = dbaas_admin.diagnostics.get(instance_info.id)
@ -1078,7 +1078,7 @@ class CheckDiagnosticsAfterTests(object):
runs_after_groups=[GROUP_START,
GROUP_START_SIMPLE, GROUP_TEST, tests.INSTANCES])
class DeleteInstance(object):
"""Delete the created instance """
"""Delete the created instance."""
@time_out(3 * 60)
@test
@ -1241,7 +1241,7 @@ class VerifyInstanceMgmtInfo(object):
class CheckInstance(AttrCheck):
"""Class to check various attributes of Instance details"""
"""Class to check various attributes of Instance details."""
def __init__(self, instance):
super(CheckInstance, self).__init__()


@ -585,7 +585,7 @@ def resize_should_not_delete_users():
groups=[GROUP, tests.INSTANCES],
enabled=VOLUME_SUPPORT)
class ResizeInstanceVolume(ActionTestBase):
"""Resize the volume of the instance """
"""Resize the volume of the instance."""
@before_class
def setUp(self):


@ -83,7 +83,7 @@ class Limits(object):
@test
def test_limits_index(self):
"""test_limits_index"""
"""Test_limits_index."""
limits = self.rd_client.limits.list()
d = self._get_limits_as_dict(limits)
@ -104,7 +104,7 @@ class Limits(object):
@test
def test_limits_get_remaining(self):
"""test_limits_get_remaining"""
"""Test_limits_get_remaining."""
limits = ()
for i in xrange(5):
@ -125,7 +125,7 @@ class Limits(object):
@test
def test_limits_exception(self):
"""test_limits_exception"""
"""Test_limits_exception."""
# use a different user to avoid throttling tests run out of order
rate_user_exceeded = self._get_user('rate_limit_exceeded')


@ -212,7 +212,7 @@ class AccountWithBrokenInstance(object):
@test
def no_compute_instance_no_problem(self):
'''Get account by ID shows even instances lacking computes'''
"""Get account by ID shows even instances lacking computes."""
if test_config.auth_strategy == "fake":
raise SkipTest("Skipping this as auth is faked anyway.")
account_info = self.client.accounts.show(self.user.tenant_id)


@ -42,17 +42,17 @@ class TestAdminRequired(object):
@test
def test_accounts_show(self):
"""A regular user may not view the details of any account. """
"""A regular user may not view the details of any account."""
assert_raises(Unauthorized, self.dbaas.accounts.show, 0)
@test
def test_hosts_index(self):
"""A regular user may not view the list of hosts. """
"""A regular user may not view the list of hosts."""
assert_raises(Unauthorized, self.dbaas.hosts.index)
@test
def test_hosts_get(self):
"""A regular user may not view the details of any host. """
"""A regular user may not view the details of any host."""
assert_raises(Unauthorized, self.dbaas.hosts.get, 0)
@test
@ -74,25 +74,25 @@ class TestAdminRequired(object):
@test
def test_mgmt_instance_reboot(self):
"""A regular user may not perform an instance reboot. """
"""A regular user may not perform an instance reboot."""
assert_raises(Unauthorized, self.dbaas.management.reboot, 0)
@test
def test_mgmt_instance_reset_task_status(self):
"""A regular user may not perform an instance task status reset. """
"""A regular user may not perform an instance task status reset."""
assert_raises(Unauthorized, self.dbaas.management.reset_task_status, 0)
@test
def test_storage_index(self):
"""A regular user may not view the list of storage available. """
"""A regular user may not view the list of storage available."""
assert_raises(Unauthorized, self.dbaas.storage.index)
@test
def test_diagnostics_get(self):
"""A regular user may not view the diagnostics. """
"""A regular user may not view the diagnostics."""
assert_raises(Unauthorized, self.dbaas.diagnostics.get, 0)
@test
def test_hwinfo_get(self):
"""A regular user may not view the hardware info. """
"""A regular user may not view the hardware info."""
assert_raises(Unauthorized, self.dbaas.hwinfo.get, 0)


@ -39,7 +39,7 @@ GROUP = "dbaas.api.mgmt.instances"
@test(groups=[GROUP])
def mgmt_index_requires_admin_account():
"""Verify that an admin context is required to call this function. """
"""Verify that an admin context is required to call this function."""
client = create_client(is_admin=False)
assert_raises(exceptions.Unauthorized, client.management.index)
@ -72,7 +72,7 @@ def volume_check(volume):
@test(depends_on_groups=[GROUP_START], groups=[GROUP, GROUP_TEST])
def mgmt_instance_get():
"""Tests the mgmt instances index method. """
"""Tests the mgmt instances index method."""
reqs = Requirements(is_admin=True)
user = CONFIG.users.find_user(reqs)
client = create_dbaas_client(user)
@ -199,7 +199,7 @@ class WhenMgmtInstanceGetIsCalledButServerIsNotReady(object):
@test(depends_on_classes=[CreateInstance], groups=[GROUP])
class MgmtInstancesIndex(object):
"""Tests the mgmt instances index method. """
"""Tests the mgmt instances index method."""
@before_class
def setUp(self):


@ -66,7 +66,7 @@ class TestRoot(object):
@test
def test_root_initially_disabled(self):
"""Test that root is disabled"""
"""Test that root is disabled."""
enabled = self.dbaas.root.is_root_enabled(instance_info.id)
assert_equal(200, self.dbaas.last_http_code)
@ -98,7 +98,7 @@ class TestRoot(object):
@test(depends_on=[test_root_initially_disabled_details])
def test_root_disabled_in_mgmt_api(self):
"""Verifies in the management api that the timestamp exists"""
"""Verifies in the management api that the timestamp exists."""
self._verify_root_timestamp(instance_info.id)
@test(depends_on=[test_root_initially_disabled_details])


@ -87,21 +87,21 @@ class TestRootOnCreate(object):
@test
def test_root_on_create(self):
"""Test that root is enabled after instance creation"""
"""Test that root is enabled after instance creation."""
enabled = self.enabled(self.instance_id).rootEnabled
assert_equal(200, self.dbaas.last_http_code)
assert_true(enabled)
@test(depends_on=[test_root_on_create])
def test_history_after_root_on_create(self):
"""Test that the timestamp in the root enabled history is set"""
"""Test that the timestamp in the root enabled history is set."""
self.root_enabled_timestamp = self.history(self.instance_id).enabled
assert_equal(200, self.dbaas.last_http_code)
assert_not_equal(self.root_enabled_timestamp, 'Never')
@test(depends_on=[test_history_after_root_on_create])
def test_reset_root(self):
"""Test that root reset does not alter the timestamp"""
"""Test that root reset does not alter the timestamp."""
orig_timestamp = self.root_enabled_timestamp
self.dbaas.root.create(self.instance_id)
assert_equal(200, self.dbaas.last_http_code)


@ -371,7 +371,7 @@ class TestUsers(object):
@test(enabled=False)
#TODO(hub_cap): Make this test work once python-routes is updated, if ever.
def test_delete_user_with_period_in_name(self):
"""Attempt to create/destroy a user with a period in its name"""
"""Attempt to create/destroy a user with a period in its name."""
users = []
username_with_period = "user.name"
users.append({"name": username_with_period, "password": self.password,


@ -29,7 +29,7 @@ GROUP = "dbaas.api.versions"
@test(groups=[tests.DBAAS_API, GROUP, tests.PRE_INSTANCES, 'DBAAS_VERSIONS'],
depends_on_groups=["services.initialize"])
class Versions(object):
"""Test listing all versions and verify the current version"""
"""Test listing all versions and verify the current version."""
@before_class
def setUp(self):


@ -43,7 +43,7 @@ class FakeSwiftClient(object):
class FakeSwiftConnection(object):
"""Logging calls instead of executing"""
"""Logging calls instead of executing."""
MANIFEST_HEADER_KEY = 'X-Object-Manifest'
url = 'http://mockswift/v1'


@ -179,7 +179,7 @@ class LimitsControllerTest(BaseLimitTestSuite):
class TestLimiter(limits.Limiter):
"""Note: This was taken from Nova"""
"""Note: This was taken from Nova."""
pass


@ -61,7 +61,7 @@ class MockCheckProcessBackup(MockBackup):
class MockLossyBackup(MockBackup):
"""Fake Incomplete writes to swift"""
"""Fake Incomplete writes to swift."""
def read(self, *args):
results = super(MockLossyBackup, self).read(*args)
@ -71,7 +71,7 @@ class MockLossyBackup(MockBackup):
class MockSwift(object):
"""Store files in String"""
"""Store files in String."""
def __init__(self, *args, **kwargs):
self.store = ''


@ -28,7 +28,7 @@ from trove.guestagent.strategies.storage.swift import StreamReader
class SwiftStorageSaveChecksumTests(testtools.TestCase):
"""SwiftStorage.save is used to save a backup to Swift"""
"""SwiftStorage.save is used to save a backup to Swift."""
def setUp(self):
super(SwiftStorageSaveChecksumTests, self).setUp()
@ -37,7 +37,7 @@ class SwiftStorageSaveChecksumTests(testtools.TestCase):
super(SwiftStorageSaveChecksumTests, self).tearDown()
def test_swift_checksum_save(self):
"""This tests that SwiftStorage.save returns the swift checksum"""
"""This tests that SwiftStorage.save returns the swift checksum."""
context = TroveContext()
backup_id = '123'
user = 'user'


@ -214,7 +214,7 @@ def unquote_user_host(user_hostname):
def iso_time(time_string):
"""Return a iso formated datetime: 2013-04-15T19:50:23Z"""
"""Return a iso formated datetime: 2013-04-15T19:50:23Z."""
ts = time_string.replace(' ', 'T')
try:
micro = ts.rindex('.')
@ -266,7 +266,7 @@ class MySqlConnection(object):
class LocalSqlClient(object):
"""A sqlalchemy wrapper to manage transactions"""
"""A sqlalchemy wrapper to manage transactions."""
def __init__(self, engine, use_flush=True):
self.engine = engine