docstring cleanup, nova dir
		| @@ -16,31 +16,34 @@ | ||||
| #    License for the specific language governing permissions and limitations | ||||
| #    under the License. | ||||
|  | ||||
| """ | ||||
| Nova base exception handling, including decorator for re-raising | ||||
| Nova-type exceptions. SHOULD include dedicated exception logging. | ||||
| """Nova base exception handling. | ||||
|  | ||||
| Includes decorator for re-raising Nova-type exceptions. | ||||
|  | ||||
| SHOULD include dedicated exception logging. | ||||
|  | ||||
| """ | ||||
|  | ||||
| from nova import log as logging | ||||
|  | ||||
|  | ||||
| LOG = logging.getLogger('nova.exception') | ||||
|  | ||||
|  | ||||
| class ProcessExecutionError(IOError): | ||||
|  | ||||
|     def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None, | ||||
|                  description=None): | ||||
|         if description is None: | ||||
|             description = _("Unexpected error while running command.") | ||||
|             description = _('Unexpected error while running command.') | ||||
|         if exit_code is None: | ||||
|             exit_code = '-' | ||||
|         message = _("%(description)s\nCommand: %(cmd)s\n" | ||||
|                 "Exit code: %(exit_code)s\nStdout: %(stdout)r\n" | ||||
|                 "Stderr: %(stderr)r") % locals() | ||||
|         message = _('%(description)s\nCommand: %(cmd)s\n' | ||||
|                     'Exit code: %(exit_code)s\nStdout: %(stdout)r\n' | ||||
|                     'Stderr: %(stderr)r') % locals() | ||||
|         IOError.__init__(self, message) | ||||
|  | ||||
|  | ||||
| class Error(Exception): | ||||
|  | ||||
|     def __init__(self, message=None): | ||||
|         super(Error, self).__init__(message) | ||||
|  | ||||
| @@ -97,7 +100,7 @@ class TimeoutException(Error): | ||||
|  | ||||
|  | ||||
| class DBError(Error): | ||||
|     """Wraps an implementation specific exception""" | ||||
|     """Wraps an implementation specific exception.""" | ||||
|     def __init__(self, inner_exception): | ||||
|         self.inner_exception = inner_exception | ||||
|         super(DBError, self).__init__(str(inner_exception)) | ||||
| @@ -108,7 +111,7 @@ def wrap_db_error(f): | ||||
|         try: | ||||
|             return f(*args, **kwargs) | ||||
|         except Exception, e: | ||||
|             LOG.exception(_('DB exception wrapped')) | ||||
|             LOG.exception(_('DB exception wrapped.')) | ||||
|             raise DBError(e) | ||||
|     _wrap.func_name = f.func_name | ||||
|     return _wrap | ||||
|   | ||||
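ProcessExecutionError above folds the failing command, its exit code and both output streams into a single IOError subclass, and wrap_db_error does the same kind of re-raising for database errors. A minimal, illustrative sketch of the former (the command and messages are made up):

    from nova import exception

    try:
        raise exception.ProcessExecutionError(cmd='ls /nonexistent',
                                              exit_code=2,
                                              stdout='',
                                              stderr='No such file or directory')
    except IOError, err:
        # ProcessExecutionError subclasses IOError, so generic handlers still
        # catch it, and the message carries command, exit code, stdout and stderr.
        print err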
| @@ -18,14 +18,14 @@ | ||||
|  | ||||
| """Super simple fake memcache client.""" | ||||
|  | ||||
| import utils | ||||
| from nova import utils | ||||
|  | ||||
|  | ||||
| class Client(object): | ||||
|     """Replicates a tiny subset of memcached client interface.""" | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         """Ignores the passed in args""" | ||||
|         """Ignores the passed in args.""" | ||||
|         self.cache = {} | ||||
|  | ||||
|     def get(self, key): | ||||
|   | ||||
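The fake client above stands in for a real memcached client in tests. A small usage sketch (the nova.fakememcache module path and the set() call are assumptions, since this hunk names neither the file nor any method besides get()):

    from nova import fakememcache

    cache = fakememcache.Client('127.0.0.1:11211')  # constructor args are ignored
    cache.set('token-abc', 'user-1')                # set() assumed from the real client
    print cache.get('token-abc')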
| @@ -16,9 +16,13 @@ | ||||
| #    License for the specific language governing permissions and limitations | ||||
| #    under the License. | ||||
|  | ||||
| """ | ||||
| """Command-line flag library. | ||||
|  | ||||
| Wraps gflags. | ||||
|  | ||||
| Package-level global flags are defined here, the rest are defined | ||||
| where they're used. | ||||
|  | ||||
| """ | ||||
|  | ||||
| import getopt | ||||
| @@ -145,10 +149,12 @@ class FlagValues(gflags.FlagValues): | ||||
|  | ||||
|  | ||||
| class StrWrapper(object): | ||||
|     """Wrapper around FlagValues objects | ||||
|     """Wrapper around FlagValues objects. | ||||
|  | ||||
|     Wraps FlagValues objects for string.Template so that we're | ||||
|     sure to return strings.""" | ||||
|     sure to return strings. | ||||
|  | ||||
|     """ | ||||
|     def __init__(self, context_objs): | ||||
|         self.context_objs = context_objs | ||||
|  | ||||
| @@ -169,6 +175,7 @@ def _GetCallingModule(): | ||||
|  | ||||
|     We generally use this function to get the name of the module calling a | ||||
|     DEFINE_foo... function. | ||||
|  | ||||
|     """ | ||||
|     # Walk down the stack to find the first globals dict that's not ours. | ||||
|     for depth in range(1, sys.getrecursionlimit()): | ||||
| @@ -192,6 +199,7 @@ def __GetModuleName(globals_dict): | ||||
|     Returns: | ||||
|     A string (the name of the module) or None (if the module could not | ||||
|     be identified). | ||||
|  | ||||
|     """ | ||||
|     for name, module in sys.modules.iteritems(): | ||||
|         if getattr(module, '__dict__', None) is globals_dict: | ||||
| @@ -326,7 +334,7 @@ DEFINE_integer('auth_token_ttl', 3600, 'Seconds for auth tokens to linger') | ||||
| DEFINE_string('state_path', os.path.join(os.path.dirname(__file__), '../'), | ||||
|               "Top-level directory for maintaining nova's state") | ||||
| DEFINE_string('lock_path', os.path.join(os.path.dirname(__file__), '../'), | ||||
|               "Directory for lock files") | ||||
|               'Directory for lock files') | ||||
| DEFINE_string('logdir', None, 'output to a per-service log file in named ' | ||||
|                               'directory') | ||||
|  | ||||
|   | ||||
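Modules declare their options with the DEFINE_* helpers shown above and read them back from the shared FLAGS object. A minimal sketch (the flag name and default are illustrative, not part of this commit):

    from nova import flags

    FLAGS = flags.FLAGS
    flags.DEFINE_string('scratch_path', '/tmp/nova',
                        'Directory for scratch files')

    # Flag values are read back as attributes once the entry point parses argv.
    print FLAGS.scratch_path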
							
								
								
									
nova/log.py (49 lines changed)
							| @@ -16,16 +16,15 @@ | ||||
| #    License for the specific language governing permissions and limitations | ||||
| #    under the License. | ||||
|  | ||||
| """ | ||||
| Nova logging handler. | ||||
| """Nova logging handler. | ||||
|  | ||||
| This module adds to logging functionality by adding the option to specify | ||||
| a context object when calling the various log methods.  If the context object | ||||
| is not specified, default formatting is used. | ||||
|  | ||||
| It also allows setting of formatting information through flags. | ||||
| """ | ||||
|  | ||||
| """ | ||||
|  | ||||
| import cStringIO | ||||
| import inspect | ||||
| @@ -41,34 +40,28 @@ from nova import version | ||||
|  | ||||
|  | ||||
| FLAGS = flags.FLAGS | ||||
|  | ||||
| flags.DEFINE_string('logging_context_format_string', | ||||
|                     '%(asctime)s %(levelname)s %(name)s ' | ||||
|                     '[%(request_id)s %(user)s ' | ||||
|                     '%(project)s] %(message)s', | ||||
|                     'format string to use for log messages with context') | ||||
|  | ||||
| flags.DEFINE_string('logging_default_format_string', | ||||
|                     '%(asctime)s %(levelname)s %(name)s [-] ' | ||||
|                     '%(message)s', | ||||
|                     'format string to use for log messages without context') | ||||
|  | ||||
| flags.DEFINE_string('logging_debug_format_suffix', | ||||
|                     'from (pid=%(process)d) %(funcName)s' | ||||
|                     ' %(pathname)s:%(lineno)d', | ||||
|                     'data to append to log format when level is DEBUG') | ||||
|  | ||||
| flags.DEFINE_string('logging_exception_prefix', | ||||
|                     '(%(name)s): TRACE: ', | ||||
|                     'prefix each line of exception output with this format') | ||||
|  | ||||
| flags.DEFINE_list('default_log_levels', | ||||
|                   ['amqplib=WARN', | ||||
|                    'sqlalchemy=WARN', | ||||
|                    'boto=WARN', | ||||
|                    'eventlet.wsgi.server=WARN'], | ||||
|                   'list of logger=LEVEL pairs') | ||||
|  | ||||
| flags.DEFINE_bool('use_syslog', False, 'output to syslog') | ||||
| flags.DEFINE_string('logfile', None, 'output to named file') | ||||
|  | ||||
| @@ -83,6 +76,8 @@ WARN = logging.WARN | ||||
| INFO = logging.INFO | ||||
| DEBUG = logging.DEBUG | ||||
| NOTSET = logging.NOTSET | ||||
|  | ||||
|  | ||||
| # methods | ||||
| getLogger = logging.getLogger | ||||
| debug = logging.debug | ||||
| @@ -93,6 +88,8 @@ error = logging.error | ||||
| exception = logging.exception | ||||
| critical = logging.critical | ||||
| log = logging.log | ||||
|  | ||||
|  | ||||
| # handlers | ||||
| StreamHandler = logging.StreamHandler | ||||
| WatchedFileHandler = logging.handlers.WatchedFileHandler | ||||
| @@ -127,17 +124,18 @@ def _get_log_file_path(binary=None): | ||||
|  | ||||
|  | ||||
| class NovaLogger(logging.Logger): | ||||
|     """ | ||||
|     NovaLogger manages request context and formatting. | ||||
|     """NovaLogger manages request context and formatting. | ||||
|  | ||||
|     This becomes the class that is instantiated by logging.getLogger. | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, name, level=NOTSET): | ||||
|         logging.Logger.__init__(self, name, level) | ||||
|         self.setup_from_flags() | ||||
|  | ||||
|     def setup_from_flags(self): | ||||
|         """Setup logger from flags""" | ||||
|         """Setup logger from flags.""" | ||||
|         level = NOTSET | ||||
|         for pair in FLAGS.default_log_levels: | ||||
|             logger, _sep, level_name = pair.partition('=') | ||||
| @@ -148,7 +146,7 @@ class NovaLogger(logging.Logger): | ||||
|         self.setLevel(level) | ||||
|  | ||||
|     def _log(self, level, msg, args, exc_info=None, extra=None, context=None): | ||||
|         """Extract context from any log call""" | ||||
|         """Extract context from any log call.""" | ||||
|         if not extra: | ||||
|             extra = {} | ||||
|         if context: | ||||
| @@ -157,17 +155,17 @@ class NovaLogger(logging.Logger): | ||||
|         return logging.Logger._log(self, level, msg, args, exc_info, extra) | ||||
|  | ||||
|     def addHandler(self, handler): | ||||
|         """Each handler gets our custom formatter""" | ||||
|         """Each handler gets our custom formatter.""" | ||||
|         handler.setFormatter(_formatter) | ||||
|         return logging.Logger.addHandler(self, handler) | ||||
|  | ||||
|     def audit(self, msg, *args, **kwargs): | ||||
|         """Shortcut for our AUDIT level""" | ||||
|         """Shortcut for our AUDIT level.""" | ||||
|         if self.isEnabledFor(AUDIT): | ||||
|             self._log(AUDIT, msg, args, **kwargs) | ||||
|  | ||||
|     def exception(self, msg, *args, **kwargs): | ||||
|         """Logging.exception doesn't handle kwargs, so breaks context""" | ||||
|         """Logging.exception doesn't handle kwargs, so breaks context.""" | ||||
|         if not kwargs.get('exc_info'): | ||||
|             kwargs['exc_info'] = 1 | ||||
|         self.error(msg, *args, **kwargs) | ||||
| @@ -181,14 +179,13 @@ class NovaLogger(logging.Logger): | ||||
|             for k in env.keys(): | ||||
|                 if not isinstance(env[k], str): | ||||
|                     env.pop(k) | ||||
|             message = "Environment: %s" % json.dumps(env) | ||||
|             message = 'Environment: %s' % json.dumps(env) | ||||
|             kwargs.pop('exc_info') | ||||
|             self.error(message, **kwargs) | ||||
|  | ||||
|  | ||||
| class NovaFormatter(logging.Formatter): | ||||
|     """ | ||||
|     A nova.context.RequestContext aware formatter configured through flags. | ||||
|     """A nova.context.RequestContext aware formatter configured through flags. | ||||
|  | ||||
|     The flags used to set format strings are: logging_context_format_string | ||||
|     and logging_default_format_string.  You can also specify | ||||
| @@ -197,10 +194,11 @@ class NovaFormatter(logging.Formatter): | ||||
|  | ||||
|     For information about what variables are available for the formatter see: | ||||
|     http://docs.python.org/library/logging.html#formatter | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def format(self, record): | ||||
|         """Uses contextstring if request_id is set, otherwise default""" | ||||
|         """Uses contextstring if request_id is set, otherwise default.""" | ||||
|         if record.__dict__.get('request_id', None): | ||||
|             self._fmt = FLAGS.logging_context_format_string | ||||
|         else: | ||||
| @@ -214,20 +212,21 @@ class NovaFormatter(logging.Formatter): | ||||
|         return logging.Formatter.format(self, record) | ||||
|  | ||||
|     def formatException(self, exc_info, record=None): | ||||
|         """Format exception output with FLAGS.logging_exception_prefix""" | ||||
|         """Format exception output with FLAGS.logging_exception_prefix.""" | ||||
|         if not record: | ||||
|             return logging.Formatter.formatException(self, exc_info) | ||||
|         stringbuffer = cStringIO.StringIO() | ||||
|         traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], | ||||
|                                   None, stringbuffer) | ||||
|         lines = stringbuffer.getvalue().split("\n") | ||||
|         lines = stringbuffer.getvalue().split('\n') | ||||
|         stringbuffer.close() | ||||
|         formatted_lines = [] | ||||
|         for line in lines: | ||||
|             pl = FLAGS.logging_exception_prefix % record.__dict__ | ||||
|             fl = "%s%s" % (pl, line) | ||||
|             fl = '%s%s' % (pl, line) | ||||
|             formatted_lines.append(fl) | ||||
|         return "\n".join(formatted_lines) | ||||
|         return '\n'.join(formatted_lines) | ||||
|  | ||||
|  | ||||
| _formatter = NovaFormatter() | ||||
|  | ||||
| @@ -241,7 +240,7 @@ class NovaRootLogger(NovaLogger): | ||||
|         NovaLogger.__init__(self, name, level) | ||||
|  | ||||
|     def setup_from_flags(self): | ||||
|         """Setup logger from flags""" | ||||
|         """Setup logger from flags.""" | ||||
|         global _filelog | ||||
|         if FLAGS.use_syslog: | ||||
|             self.syslog = SysLogHandler(address='/dev/log') | ||||
|   | ||||
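The practical effect of NovaLogger and NovaFormatter is that any log call may carry a request context, which switches output to logging_context_format_string. A short sketch (the RequestContext constructor arguments are an assumption; this diff does not show them):

    from nova import context
    from nova import log as logging

    LOG = logging.getLogger('nova.compute')

    ctxt = context.RequestContext(user='demo', project='demo-project')
    LOG.info('instance started')                      # default format string
    LOG.audit('privileged operation', context=ctxt)   # context-aware format string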
| @@ -16,7 +16,8 @@ | ||||
| #    License for the specific language governing permissions and limitations | ||||
| #    under the License. | ||||
|  | ||||
| """ | ||||
| """Base Manager class. | ||||
|  | ||||
| Managers are responsible for a certain aspect of the system.  It is a logical | ||||
| grouping of code relating to a portion of the system.  In general other | ||||
| components should be using the manager to make changes to the components that | ||||
| @@ -49,16 +50,19 @@ Managers will often provide methods for initial setup of a host or periodic | ||||
| tasks to a wrapping service. | ||||
|  | ||||
| This module provides Manager, a base class for managers. | ||||
|  | ||||
| """ | ||||
|  | ||||
| from nova import utils | ||||
| from nova import flags | ||||
| from nova import log as logging | ||||
| from nova import utils | ||||
| from nova.db import base | ||||
| from nova.scheduler import api | ||||
|  | ||||
|  | ||||
| FLAGS = flags.FLAGS | ||||
|  | ||||
|  | ||||
| LOG = logging.getLogger('nova.manager') | ||||
|  | ||||
|  | ||||
| @@ -70,23 +74,29 @@ class Manager(base.Base): | ||||
|         super(Manager, self).__init__(db_driver) | ||||
|  | ||||
|     def periodic_tasks(self, context=None): | ||||
|         """Tasks to be run at a periodic interval""" | ||||
|         """Tasks to be run at a periodic interval.""" | ||||
|         pass | ||||
|  | ||||
|     def init_host(self): | ||||
|         """Do any initialization that needs to be run if this is a standalone | ||||
|         service. Child classes should override this method.""" | ||||
|         """Handle initialization if this is a standalone service. | ||||
|  | ||||
|         Child classes should override this method. | ||||
|  | ||||
|         """ | ||||
|         pass | ||||
|  | ||||
|  | ||||
| class SchedulerDependentManager(Manager): | ||||
|     """Periodically send capability updates to the Scheduler services. | ||||
|        Services that need to update the Scheduler of their capabilities | ||||
|        should derive from this class. Otherwise they can derive from | ||||
|        manager.Manager directly. Updates are only sent after | ||||
|        update_service_capabilities is called with non-None values.""" | ||||
|  | ||||
|     def __init__(self, host=None, db_driver=None, service_name="undefined"): | ||||
|     Services that need to update the Scheduler of their capabilities | ||||
|     should derive from this class. Otherwise they can derive from | ||||
|     manager.Manager directly. Updates are only sent after | ||||
|     update_service_capabilities is called with non-None values. | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, host=None, db_driver=None, service_name='undefined'): | ||||
|         self.last_capabilities = None | ||||
|         self.service_name = service_name | ||||
|         super(SchedulerDependentManager, self).__init__(host, db_driver) | ||||
| @@ -96,9 +106,9 @@ class SchedulerDependentManager(Manager): | ||||
|         self.last_capabilities = capabilities | ||||
|  | ||||
|     def periodic_tasks(self, context=None): | ||||
|         """Pass data back to the scheduler at a periodic interval""" | ||||
|         """Pass data back to the scheduler at a periodic interval.""" | ||||
|         if self.last_capabilities: | ||||
|             LOG.debug(_("Notifying Schedulers of capabilities ...")) | ||||
|             LOG.debug(_('Notifying Schedulers of capabilities ...')) | ||||
|             api.update_service_capabilities(context, self.service_name, | ||||
|                                 self.host, self.last_capabilities) | ||||
|  | ||||
|   | ||||
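A service that should advertise its capabilities derives from SchedulerDependentManager and calls update_service_capabilities with a non-None value, which the next periodic_tasks run forwards to the scheduler. A sketch of such a subclass (class name and capability keys are illustrative):

    from nova import manager


    class ExampleManager(manager.SchedulerDependentManager):

        def __init__(self, host=None, db_driver=None):
            super(ExampleManager, self).__init__(host, db_driver,
                                                 service_name='example')

        def init_host(self):
            # Stored here; pushed to the scheduler on the next periodic_tasks().
            self.update_service_capabilities({'vcpus': 8, 'memory_mb': 16384})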
| @@ -15,16 +15,15 @@ | ||||
| #    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the | ||||
| #    License for the specific language governing permissions and limitations | ||||
| #    under the License. | ||||
| """ | ||||
| Quotas for instances, volumes, and floating ips | ||||
| """ | ||||
|  | ||||
| """Quotas for instances, volumes, and floating ips.""" | ||||
|  | ||||
| from nova import db | ||||
| from nova import exception | ||||
| from nova import flags | ||||
|  | ||||
| FLAGS = flags.FLAGS | ||||
|  | ||||
| FLAGS = flags.FLAGS | ||||
| flags.DEFINE_integer('quota_instances', 10, | ||||
|                      'number of instances allowed per project') | ||||
| flags.DEFINE_integer('quota_cores', 20, | ||||
| @@ -64,7 +63,7 @@ def get_quota(context, project_id): | ||||
|  | ||||
|  | ||||
| def allowed_instances(context, num_instances, instance_type): | ||||
|     """Check quota and return min(num_instances, allowed_instances)""" | ||||
|     """Check quota and return min(num_instances, allowed_instances).""" | ||||
|     project_id = context.project_id | ||||
|     context = context.elevated() | ||||
|     used_instances, used_cores = db.instance_data_get_for_project(context, | ||||
| @@ -79,7 +78,7 @@ def allowed_instances(context, num_instances, instance_type): | ||||
|  | ||||
|  | ||||
| def allowed_volumes(context, num_volumes, size): | ||||
|     """Check quota and return min(num_volumes, allowed_volumes)""" | ||||
|     """Check quota and return min(num_volumes, allowed_volumes).""" | ||||
|     project_id = context.project_id | ||||
|     context = context.elevated() | ||||
|     used_volumes, used_gigabytes = db.volume_data_get_for_project(context, | ||||
| @@ -95,7 +94,7 @@ def allowed_volumes(context, num_volumes, size): | ||||
|  | ||||
|  | ||||
| def allowed_floating_ips(context, num_floating_ips): | ||||
|     """Check quota and return min(num_floating_ips, allowed_floating_ips)""" | ||||
|     """Check quota and return min(num_floating_ips, allowed_floating_ips).""" | ||||
|     project_id = context.project_id | ||||
|     context = context.elevated() | ||||
|     used_floating_ips = db.floating_ip_count_by_project(context, project_id) | ||||
| @@ -105,7 +104,7 @@ def allowed_floating_ips(context, num_floating_ips): | ||||
|  | ||||
|  | ||||
| def allowed_metadata_items(context, num_metadata_items): | ||||
|     """Check quota; return min(num_metadata_items,allowed_metadata_items)""" | ||||
|     """Check quota; return min(num_metadata_items,allowed_metadata_items).""" | ||||
|     project_id = context.project_id | ||||
|     context = context.elevated() | ||||
|     quota = get_quota(context, project_id) | ||||
| @@ -114,20 +113,20 @@ def allowed_metadata_items(context, num_metadata_items): | ||||
|  | ||||
|  | ||||
| def allowed_injected_files(context): | ||||
|     """Return the number of injected files allowed""" | ||||
|     """Return the number of injected files allowed.""" | ||||
|     return FLAGS.quota_max_injected_files | ||||
|  | ||||
|  | ||||
| def allowed_injected_file_content_bytes(context): | ||||
|     """Return the number of bytes allowed per injected file content""" | ||||
|     """Return the number of bytes allowed per injected file content.""" | ||||
|     return FLAGS.quota_max_injected_file_content_bytes | ||||
|  | ||||
|  | ||||
| def allowed_injected_file_path_bytes(context): | ||||
|     """Return the number of bytes allowed in an injected file path""" | ||||
|     """Return the number of bytes allowed in an injected file path.""" | ||||
|     return FLAGS.quota_max_injected_file_path_bytes | ||||
|  | ||||
|  | ||||
| class QuotaError(exception.ApiError): | ||||
|     """Quota Exceeded""" | ||||
|     """Quota Exceeded.""" | ||||
|     pass | ||||
|   | ||||
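Each quota helper above clamps a request to what the project still has available. A sketch of checking an instance request (this needs a configured database behind nova.db, and the instance_type dict shape is an assumption):

    from nova import context
    from nova import quota

    ctxt = context.get_admin_context()
    requested = 5
    allowed = quota.allowed_instances(ctxt, requested, {'vcpus': 2})
    if allowed < requested:
        raise quota.QuotaError('Instance quota exceeded')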
							
								
								
									
nova/rpc.py (152 lines changed)
							| @@ -16,9 +16,12 @@ | ||||
| #    License for the specific language governing permissions and limitations | ||||
| #    under the License. | ||||
|  | ||||
| """ | ||||
| AMQP-based RPC. Queues have consumers and publishers. | ||||
| """AMQP-based RPC. | ||||
|  | ||||
| Queues have consumers and publishers. | ||||
|  | ||||
| No fan-out support yet. | ||||
|  | ||||
| """ | ||||
|  | ||||
| import json | ||||
| @@ -40,17 +43,19 @@ from nova import log as logging | ||||
| from nova import utils | ||||
|  | ||||
|  | ||||
| FLAGS = flags.FLAGS | ||||
| LOG = logging.getLogger('nova.rpc') | ||||
|  | ||||
|  | ||||
| FLAGS = flags.FLAGS | ||||
| flags.DEFINE_integer('rpc_thread_pool_size', 1024, 'Size of RPC thread pool') | ||||
|  | ||||
|  | ||||
| class Connection(carrot_connection.BrokerConnection): | ||||
|     """Connection instance object""" | ||||
|     """Connection instance object.""" | ||||
|  | ||||
|     @classmethod | ||||
|     def instance(cls, new=True): | ||||
|         """Returns the instance""" | ||||
|         """Returns the instance.""" | ||||
|         if new or not hasattr(cls, '_instance'): | ||||
|             params = dict(hostname=FLAGS.rabbit_host, | ||||
|                           port=FLAGS.rabbit_port, | ||||
| @@ -71,9 +76,11 @@ class Connection(carrot_connection.BrokerConnection): | ||||
|  | ||||
|     @classmethod | ||||
|     def recreate(cls): | ||||
|         """Recreates the connection instance | ||||
|         """Recreates the connection instance. | ||||
|  | ||||
|         This is necessary to recover from some network errors/disconnects""" | ||||
|         This is necessary to recover from some network errors/disconnects. | ||||
|  | ||||
|         """ | ||||
|         try: | ||||
|             del cls._instance | ||||
|         except AttributeError, e: | ||||
| @@ -84,10 +91,12 @@ class Connection(carrot_connection.BrokerConnection): | ||||
|  | ||||
|  | ||||
| class Consumer(messaging.Consumer): | ||||
|     """Consumer base class | ||||
|     """Consumer base class. | ||||
|  | ||||
|     Contains methods for connecting the fetch method to async loops. | ||||
|  | ||||
|     Contains methods for connecting the fetch method to async loops | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         for i in xrange(FLAGS.rabbit_max_retries): | ||||
|             if i > 0: | ||||
| @@ -100,19 +109,18 @@ class Consumer(messaging.Consumer): | ||||
|                 fl_host = FLAGS.rabbit_host | ||||
|                 fl_port = FLAGS.rabbit_port | ||||
|                 fl_intv = FLAGS.rabbit_retry_interval | ||||
|                 LOG.error(_("AMQP server on %(fl_host)s:%(fl_port)d is" | ||||
|                         " unreachable: %(e)s. Trying again in %(fl_intv)d" | ||||
|                         " seconds.") | ||||
|                         % locals()) | ||||
|                 LOG.error(_('AMQP server on %(fl_host)s:%(fl_port)d is' | ||||
|                             ' unreachable: %(e)s. Trying again in %(fl_intv)d' | ||||
|                             ' seconds.') % locals()) | ||||
|                 self.failed_connection = True | ||||
|         if self.failed_connection: | ||||
|             LOG.error(_("Unable to connect to AMQP server " | ||||
|                         "after %d tries. Shutting down."), | ||||
|             LOG.error(_('Unable to connect to AMQP server ' | ||||
|                         'after %d tries. Shutting down.'), | ||||
|                       FLAGS.rabbit_max_retries) | ||||
|             sys.exit(1) | ||||
|  | ||||
|     def fetch(self, no_ack=None, auto_ack=None, enable_callbacks=False): | ||||
|         """Wraps the parent fetch with some logic for failed connections""" | ||||
|         """Wraps the parent fetch with some logic for failed connection.""" | ||||
|         # TODO(vish): the logic for failed connections and logging should be | ||||
|         #             refactored into some sort of connection manager object | ||||
|         try: | ||||
| @@ -125,14 +133,14 @@ class Consumer(messaging.Consumer): | ||||
|                 self.declare() | ||||
|             super(Consumer, self).fetch(no_ack, auto_ack, enable_callbacks) | ||||
|             if self.failed_connection: | ||||
|                 LOG.error(_("Reconnected to queue")) | ||||
|                 LOG.error(_('Reconnected to queue')) | ||||
|                 self.failed_connection = False | ||||
|         # NOTE(vish): This is catching all errors because we really don't | ||||
|         #             want exceptions to be logged 10 times a second if some | ||||
|         #             persistent failure occurs. | ||||
|         except Exception, e:  # pylint: disable=W0703 | ||||
|             if not self.failed_connection: | ||||
|                 LOG.exception(_("Failed to fetch message from queue: %s" % e)) | ||||
|                 LOG.exception(_('Failed to fetch message from queue: %s' % e)) | ||||
|                 self.failed_connection = True | ||||
|  | ||||
|     def attach_to_eventlet(self): | ||||
| @@ -143,8 +151,9 @@ class Consumer(messaging.Consumer): | ||||
|  | ||||
|  | ||||
| class AdapterConsumer(Consumer): | ||||
|     """Calls methods on a proxy object based on method and args""" | ||||
|     def __init__(self, connection=None, topic="broadcast", proxy=None): | ||||
|     """Calls methods on a proxy object based on method and args.""" | ||||
|  | ||||
|     def __init__(self, connection=None, topic='broadcast', proxy=None): | ||||
|         LOG.debug(_('Initing the Adapter Consumer for %s') % topic) | ||||
|         self.proxy = proxy | ||||
|         self.pool = greenpool.GreenPool(FLAGS.rpc_thread_pool_size) | ||||
| @@ -156,13 +165,14 @@ class AdapterConsumer(Consumer): | ||||
|  | ||||
|     @exception.wrap_exception | ||||
|     def _receive(self, message_data, message): | ||||
|         """Magically looks for a method on the proxy object and calls it | ||||
|         """Magically looks for a method on the proxy object and calls it. | ||||
|  | ||||
|         Message data should be a dictionary with two keys: | ||||
|             method: string representing the method to call | ||||
|             args: dictionary of arg: value | ||||
|  | ||||
|         Example: {'method': 'echo', 'args': {'value': 42}} | ||||
|  | ||||
|         """ | ||||
|         LOG.debug(_('received %s') % message_data) | ||||
|         msg_id = message_data.pop('_msg_id', None) | ||||
| @@ -189,22 +199,23 @@ class AdapterConsumer(Consumer): | ||||
|             if msg_id: | ||||
|                 msg_reply(msg_id, rval, None) | ||||
|         except Exception as e: | ||||
|             logging.exception("Exception during message handling") | ||||
|             logging.exception('Exception during message handling') | ||||
|             if msg_id: | ||||
|                 msg_reply(msg_id, None, sys.exc_info()) | ||||
|         return | ||||
|  | ||||
|  | ||||
| class Publisher(messaging.Publisher): | ||||
|     """Publisher base class""" | ||||
|     """Publisher base class.""" | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class TopicAdapterConsumer(AdapterConsumer): | ||||
|     """Consumes messages on a specific topic""" | ||||
|     exchange_type = "topic" | ||||
|     """Consumes messages on a specific topic.""" | ||||
|  | ||||
|     def __init__(self, connection=None, topic="broadcast", proxy=None): | ||||
|     exchange_type = 'topic' | ||||
|  | ||||
|     def __init__(self, connection=None, topic='broadcast', proxy=None): | ||||
|         self.queue = topic | ||||
|         self.routing_key = topic | ||||
|         self.exchange = FLAGS.control_exchange | ||||
| @@ -214,27 +225,29 @@ class TopicAdapterConsumer(AdapterConsumer): | ||||
|  | ||||
|  | ||||
| class FanoutAdapterConsumer(AdapterConsumer): | ||||
|     """Consumes messages from a fanout exchange""" | ||||
|     exchange_type = "fanout" | ||||
|     """Consumes messages from a fanout exchange.""" | ||||
|  | ||||
|     def __init__(self, connection=None, topic="broadcast", proxy=None): | ||||
|         self.exchange = "%s_fanout" % topic | ||||
|     exchange_type = 'fanout' | ||||
|  | ||||
|     def __init__(self, connection=None, topic='broadcast', proxy=None): | ||||
|         self.exchange = '%s_fanout' % topic | ||||
|         self.routing_key = topic | ||||
|         unique = uuid.uuid4().hex | ||||
|         self.queue = "%s_fanout_%s" % (topic, unique) | ||||
|         self.queue = '%s_fanout_%s' % (topic, unique) | ||||
|         self.durable = False | ||||
|         LOG.info(_("Created '%(exchange)s' fanout exchange " | ||||
|                    "with '%(key)s' routing key"), | ||||
|                 dict(exchange=self.exchange, key=self.routing_key)) | ||||
|         LOG.info(_('Created "%(exchange)s" fanout exchange ' | ||||
|                    'with "%(key)s" routing key'), | ||||
|                  dict(exchange=self.exchange, key=self.routing_key)) | ||||
|         super(FanoutAdapterConsumer, self).__init__(connection=connection, | ||||
|                                     topic=topic, proxy=proxy) | ||||
|  | ||||
|  | ||||
| class TopicPublisher(Publisher): | ||||
|     """Publishes messages on a specific topic""" | ||||
|     exchange_type = "topic" | ||||
|     """Publishes messages on a specific topic.""" | ||||
|  | ||||
|     def __init__(self, connection=None, topic="broadcast"): | ||||
|     exchange_type = 'topic' | ||||
|  | ||||
|     def __init__(self, connection=None, topic='broadcast'): | ||||
|         self.routing_key = topic | ||||
|         self.exchange = FLAGS.control_exchange | ||||
|         self.durable = False | ||||
| @@ -243,20 +256,22 @@ class TopicPublisher(Publisher): | ||||
|  | ||||
| class FanoutPublisher(Publisher): | ||||
|     """Publishes messages to a fanout exchange.""" | ||||
|     exchange_type = "fanout" | ||||
|  | ||||
|     exchange_type = 'fanout' | ||||
|  | ||||
|     def __init__(self, topic, connection=None): | ||||
|         self.exchange = "%s_fanout" % topic | ||||
|         self.queue = "%s_fanout" % topic | ||||
|         self.exchange = '%s_fanout' % topic | ||||
|         self.queue = '%s_fanout' % topic | ||||
|         self.durable = False | ||||
|         LOG.info(_("Creating '%(exchange)s' fanout exchange"), | ||||
|                             dict(exchange=self.exchange)) | ||||
|         LOG.info(_('Creating "%(exchange)s" fanout exchange'), | ||||
|                  dict(exchange=self.exchange)) | ||||
|         super(FanoutPublisher, self).__init__(connection=connection) | ||||
|  | ||||
|  | ||||
| class DirectConsumer(Consumer): | ||||
|     """Consumes messages directly on a channel specified by msg_id""" | ||||
|     exchange_type = "direct" | ||||
|     """Consumes messages directly on a channel specified by msg_id.""" | ||||
|  | ||||
|     exchange_type = 'direct' | ||||
|  | ||||
|     def __init__(self, connection=None, msg_id=None): | ||||
|         self.queue = msg_id | ||||
| @@ -268,8 +283,9 @@ class DirectConsumer(Consumer): | ||||
|  | ||||
|  | ||||
| class DirectPublisher(Publisher): | ||||
|     """Publishes messages directly on a channel specified by msg_id""" | ||||
|     exchange_type = "direct" | ||||
|     """Publishes messages directly on a channel specified by msg_id.""" | ||||
|  | ||||
|     exchange_type = 'direct' | ||||
|  | ||||
|     def __init__(self, connection=None, msg_id=None): | ||||
|         self.routing_key = msg_id | ||||
| @@ -279,9 +295,9 @@ class DirectPublisher(Publisher): | ||||
|  | ||||
|  | ||||
| def msg_reply(msg_id, reply=None, failure=None): | ||||
|     """Sends a reply or an error on the channel signified by msg_id | ||||
|     """Sends a reply or an error on the channel signified by msg_id. | ||||
|  | ||||
|     failure should be a sys.exc_info() tuple. | ||||
|     Failure should be a sys.exc_info() tuple. | ||||
|  | ||||
|     """ | ||||
|     if failure: | ||||
| @@ -303,17 +319,20 @@ def msg_reply(msg_id, reply=None, failure=None): | ||||
|  | ||||
|  | ||||
| class RemoteError(exception.Error): | ||||
|     """Signifies that a remote class has raised an exception | ||||
|     """Signifies that a remote class has raised an exception. | ||||
|  | ||||
|     Contains a string representation of the type of the original exception, | ||||
|     the value of the original exception, and the traceback.  These are | ||||
|     sent to the parent as a joined string so printing the exception | ||||
|     contains all of the relevant info.""" | ||||
|     contains all of the relevant info. | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, exc_type, value, traceback): | ||||
|         self.exc_type = exc_type | ||||
|         self.value = value | ||||
|         self.traceback = traceback | ||||
|         super(RemoteError, self).__init__("%s %s\n%s" % (exc_type, | ||||
|         super(RemoteError, self).__init__('%s %s\n%s' % (exc_type, | ||||
|                                                          value, | ||||
|                                                          traceback)) | ||||
|  | ||||
| @@ -339,6 +358,7 @@ def _pack_context(msg, context): | ||||
|     context out into a bunch of separate keys. If we want to support | ||||
|     more arguments in rabbit messages, we may want to do the same | ||||
|     for args at some point. | ||||
|  | ||||
|     """ | ||||
|     context = dict([('_context_%s' % key, value) | ||||
|                    for (key, value) in context.to_dict().iteritems()]) | ||||
| @@ -346,11 +366,11 @@ def _pack_context(msg, context): | ||||
|  | ||||
|  | ||||
| def call(context, topic, msg): | ||||
|     """Sends a message on a topic and wait for a response""" | ||||
|     LOG.debug(_("Making asynchronous call on %s ..."), topic) | ||||
|     """Sends a message on a topic and wait for a response.""" | ||||
|     LOG.debug(_('Making asynchronous call on %s ...'), topic) | ||||
|     msg_id = uuid.uuid4().hex | ||||
|     msg.update({'_msg_id': msg_id}) | ||||
|     LOG.debug(_("MSG_ID is %s") % (msg_id)) | ||||
|     LOG.debug(_('MSG_ID is %s') % (msg_id)) | ||||
|     _pack_context(msg, context) | ||||
|  | ||||
|     class WaitMessage(object): | ||||
| @@ -387,8 +407,8 @@ def call(context, topic, msg): | ||||
|  | ||||
|  | ||||
| def cast(context, topic, msg): | ||||
|     """Sends a message on a topic without waiting for a response""" | ||||
|     LOG.debug(_("Making asynchronous cast on %s..."), topic) | ||||
|     """Sends a message on a topic without waiting for a response.""" | ||||
|     LOG.debug(_('Making asynchronous cast on %s...'), topic) | ||||
|     _pack_context(msg, context) | ||||
|     conn = Connection.instance() | ||||
|     publisher = TopicPublisher(connection=conn, topic=topic) | ||||
| @@ -397,8 +417,8 @@ def cast(context, topic, msg): | ||||
|  | ||||
|  | ||||
| def fanout_cast(context, topic, msg): | ||||
|     """Sends a message on a fanout exchange without waiting for a response""" | ||||
|     LOG.debug(_("Making asynchronous fanout cast...")) | ||||
|     """Sends a message on a fanout exchange without waiting for a response.""" | ||||
|     LOG.debug(_('Making asynchronous fanout cast...')) | ||||
|     _pack_context(msg, context) | ||||
|     conn = Connection.instance() | ||||
|     publisher = FanoutPublisher(topic, connection=conn) | ||||
| @@ -407,14 +427,14 @@ def fanout_cast(context, topic, msg): | ||||
|  | ||||
|  | ||||
| def generic_response(message_data, message): | ||||
|     """Logs a result and exits""" | ||||
|     """Logs a result and exits.""" | ||||
|     LOG.debug(_('response %s'), message_data) | ||||
|     message.ack() | ||||
|     sys.exit(0) | ||||
|  | ||||
|  | ||||
| def send_message(topic, message, wait=True): | ||||
|     """Sends a message for testing""" | ||||
|     """Sends a message for testing.""" | ||||
|     msg_id = uuid.uuid4().hex | ||||
|     message.update({'_msg_id': msg_id}) | ||||
|     LOG.debug(_('topic is %s'), topic) | ||||
| @@ -425,14 +445,14 @@ def send_message(topic, message, wait=True): | ||||
|                                       queue=msg_id, | ||||
|                                       exchange=msg_id, | ||||
|                                       auto_delete=True, | ||||
|                                       exchange_type="direct", | ||||
|                                       exchange_type='direct', | ||||
|                                       routing_key=msg_id) | ||||
|         consumer.register_callback(generic_response) | ||||
|  | ||||
|     publisher = messaging.Publisher(connection=Connection.instance(), | ||||
|                                     exchange=FLAGS.control_exchange, | ||||
|                                     durable=False, | ||||
|                                     exchange_type="topic", | ||||
|                                     exchange_type='topic', | ||||
|                                     routing_key=topic) | ||||
|     publisher.send(message) | ||||
|     publisher.close() | ||||
| @@ -441,8 +461,8 @@ def send_message(topic, message, wait=True): | ||||
|         consumer.wait() | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     # NOTE(vish): you can send messages from the command line using | ||||
|     #             topic and a json sting representing a dictionary | ||||
|     #             for the method | ||||
| if __name__ == '__main__': | ||||
|     # You can send messages from the command line using | ||||
|     # topic and a json string representing a dictionary | ||||
|     # for the method | ||||
|     send_message(sys.argv[1], json.loads(sys.argv[2])) | ||||
|   | ||||
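call() and cast() wrap the publishers and consumers above; both take a context, a topic and a message in the {'method': ..., 'args': {...}} shape documented in AdapterConsumer._receive. A short sketch against a running broker (the 'compute' topic is illustrative):

    from nova import context
    from nova import rpc

    ctxt = context.get_admin_context()
    msg = {'method': 'echo', 'args': {'value': 42}}

    result = rpc.call(ctxt, 'compute', msg)   # blocks until a reply arrives
    rpc.cast(ctxt, 'compute', msg)            # fire-and-forget, no reply consumer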
| @@ -17,9 +17,7 @@ | ||||
| #    License for the specific language governing permissions and limitations | ||||
| #    under the License. | ||||
|  | ||||
| """ | ||||
| Generic Node baseclass for all workers that run on hosts | ||||
| """ | ||||
| """Generic Node baseclass for all workers that run on hosts.""" | ||||
|  | ||||
| import inspect | ||||
| import os | ||||
| @@ -30,13 +28,11 @@ from eventlet import event | ||||
| from eventlet import greenthread | ||||
| from eventlet import greenpool | ||||
|  | ||||
| from sqlalchemy.exc import OperationalError | ||||
|  | ||||
| from nova import context | ||||
| from nova import db | ||||
| from nova import exception | ||||
| from nova import log as logging | ||||
| from nova import flags | ||||
| from nova import log as logging | ||||
| from nova import rpc | ||||
| from nova import utils | ||||
| from nova import version | ||||
| @@ -79,7 +75,7 @@ class Service(object): | ||||
|  | ||||
|     def start(self): | ||||
|         vcs_string = version.version_string_with_vcs() | ||||
|         logging.audit(_("Starting %(topic)s node (version %(vcs_string)s)"), | ||||
|         logging.audit(_('Starting %(topic)s node (version %(vcs_string)s)'), | ||||
|                       {'topic': self.topic, 'vcs_string': vcs_string}) | ||||
|         self.manager.init_host() | ||||
|         self.model_disconnected = False | ||||
| @@ -140,29 +136,24 @@ class Service(object): | ||||
|         return getattr(manager, key) | ||||
|  | ||||
|     @classmethod | ||||
|     def create(cls, | ||||
|                host=None, | ||||
|                binary=None, | ||||
|                topic=None, | ||||
|                manager=None, | ||||
|                report_interval=None, | ||||
|                periodic_interval=None): | ||||
|     def create(cls, host=None, binary=None, topic=None, manager=None, | ||||
|                report_interval=None, periodic_interval=None): | ||||
|         """Instantiates class and passes back application object. | ||||
|  | ||||
|         Args: | ||||
|             host, defaults to FLAGS.host | ||||
|             binary, defaults to basename of executable | ||||
|             topic, defaults to bin_name - "nova-" part | ||||
|             manager, defaults to FLAGS.<topic>_manager | ||||
|             report_interval, defaults to FLAGS.report_interval | ||||
|             periodic_interval, defaults to FLAGS.periodic_interval | ||||
|         :param host: defaults to FLAGS.host | ||||
|         :param binary: defaults to basename of executable | ||||
|         :param topic: defaults to bin_name - 'nova-' part | ||||
|         :param manager: defaults to FLAGS.<topic>_manager | ||||
|         :param report_interval: defaults to FLAGS.report_interval | ||||
|         :param periodic_interval: defaults to FLAGS.periodic_interval | ||||
|  | ||||
|         """ | ||||
|         if not host: | ||||
|             host = FLAGS.host | ||||
|         if not binary: | ||||
|             binary = os.path.basename(inspect.stack()[-1][1]) | ||||
|         if not topic: | ||||
|             topic = binary.rpartition("nova-")[2] | ||||
|             topic = binary.rpartition('nova-')[2] | ||||
|         if not manager: | ||||
|             manager = FLAGS.get('%s_manager' % topic, None) | ||||
|         if not report_interval: | ||||
| @@ -175,12 +166,12 @@ class Service(object): | ||||
|         return service_obj | ||||
|  | ||||
|     def kill(self): | ||||
|         """Destroy the service object in the datastore""" | ||||
|         """Destroy the service object in the datastore.""" | ||||
|         self.stop() | ||||
|         try: | ||||
|             db.service_destroy(context.get_admin_context(), self.service_id) | ||||
|         except exception.NotFound: | ||||
|             logging.warn(_("Service killed that has no database entry")) | ||||
|             logging.warn(_('Service killed that has no database entry')) | ||||
|  | ||||
|     def stop(self): | ||||
|         for x in self.timers: | ||||
| @@ -198,7 +189,7 @@ class Service(object): | ||||
|                 pass | ||||
|  | ||||
|     def periodic_tasks(self): | ||||
|         """Tasks to be run at a periodic interval""" | ||||
|         """Tasks to be run at a periodic interval.""" | ||||
|         self.manager.periodic_tasks(context.get_admin_context()) | ||||
|  | ||||
|     def report_state(self): | ||||
| @@ -208,8 +199,8 @@ class Service(object): | ||||
|             try: | ||||
|                 service_ref = db.service_get(ctxt, self.service_id) | ||||
|             except exception.NotFound: | ||||
|                 logging.debug(_("The service database object disappeared, " | ||||
|                                 "Recreating it.")) | ||||
|                 logging.debug(_('The service database object disappeared, ' | ||||
|                                 'Recreating it.')) | ||||
|                 self._create_service_ref(ctxt) | ||||
|                 service_ref = db.service_get(ctxt, self.service_id) | ||||
|  | ||||
| @@ -218,23 +209,24 @@ class Service(object): | ||||
|                              {'report_count': service_ref['report_count'] + 1}) | ||||
|  | ||||
|             # TODO(termie): make this pattern be more elegant. | ||||
|             if getattr(self, "model_disconnected", False): | ||||
|             if getattr(self, 'model_disconnected', False): | ||||
|                 self.model_disconnected = False | ||||
|                 logging.error(_("Recovered model server connection!")) | ||||
|                 logging.error(_('Recovered model server connection!')) | ||||
|  | ||||
|         # TODO(vish): this should probably only catch connection errors | ||||
|         except Exception:  # pylint: disable=W0702 | ||||
|             if not getattr(self, "model_disconnected", False): | ||||
|             if not getattr(self, 'model_disconnected', False): | ||||
|                 self.model_disconnected = True | ||||
|                 logging.exception(_("model server went away")) | ||||
|                 logging.exception(_('model server went away')) | ||||
|  | ||||
|  | ||||
| class WsgiService(object): | ||||
|     """Base class for WSGI based services. | ||||
|  | ||||
|     For each api you define, you must also define these flags: | ||||
|     :<api>_listen:            The address on which to listen | ||||
|     :<api>_listen_port:       The port on which to listen | ||||
|     :<api>_listen: The address on which to listen | ||||
|     :<api>_listen_port: The port on which to listen | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, conf, apis): | ||||
| @@ -250,13 +242,14 @@ class WsgiService(object): | ||||
|  | ||||
|  | ||||
| class ApiService(WsgiService): | ||||
|     """Class for our nova-api service""" | ||||
|     """Class for our nova-api service.""" | ||||
|  | ||||
|     @classmethod | ||||
|     def create(cls, conf=None): | ||||
|         if not conf: | ||||
|             conf = wsgi.paste_config_file(FLAGS.api_paste_config) | ||||
|             if not conf: | ||||
|                 message = (_("No paste configuration found for: %s"), | ||||
|                 message = (_('No paste configuration found for: %s'), | ||||
|                            FLAGS.api_paste_config) | ||||
|                 raise exception.Error(message) | ||||
|         api_endpoints = ['ec2', 'osapi'] | ||||
| @@ -280,11 +273,11 @@ def serve(*services): | ||||
|         FLAGS.ParseNewFlags() | ||||
|  | ||||
|     name = '_'.join(x.binary for x in services) | ||||
|     logging.debug(_("Serving %s"), name) | ||||
|     logging.debug(_("Full set of FLAGS:")) | ||||
|     logging.debug(_('Serving %s'), name) | ||||
|     logging.debug(_('Full set of FLAGS:')) | ||||
|     for flag in FLAGS: | ||||
|         flag_get = FLAGS.get(flag, None) | ||||
|         logging.debug("%(flag)s : %(flag_get)s" % locals()) | ||||
|         logging.debug('%(flag)s : %(flag_get)s' % locals()) | ||||
|  | ||||
|     for x in services: | ||||
|         x.start() | ||||
| @@ -315,20 +308,20 @@ def serve_wsgi(cls, conf=None): | ||||
|  | ||||
|  | ||||
| def _run_wsgi(paste_config_file, apis): | ||||
|     logging.debug(_("Using paste.deploy config at: %s"), paste_config_file) | ||||
|     logging.debug(_('Using paste.deploy config at: %s'), paste_config_file) | ||||
|     apps = [] | ||||
|     for api in apis: | ||||
|         config = wsgi.load_paste_configuration(paste_config_file, api) | ||||
|         if config is None: | ||||
|             logging.debug(_("No paste configuration for app: %s"), api) | ||||
|             logging.debug(_('No paste configuration for app: %s'), api) | ||||
|             continue | ||||
|         logging.debug(_("App Config: %(api)s\n%(config)r") % locals()) | ||||
|         logging.info(_("Running %s API"), api) | ||||
|         logging.debug(_('App Config: %(api)s\n%(config)r') % locals()) | ||||
|         logging.info(_('Running %s API'), api) | ||||
|         app = wsgi.load_paste_app(paste_config_file, api) | ||||
|         apps.append((app, getattr(FLAGS, "%s_listen_port" % api), | ||||
|                      getattr(FLAGS, "%s_listen" % api))) | ||||
|         apps.append((app, getattr(FLAGS, '%s_listen_port' % api), | ||||
|                      getattr(FLAGS, '%s_listen' % api))) | ||||
|     if len(apps) == 0: | ||||
|         logging.error(_("No known API applications configured in %s."), | ||||
|         logging.error(_('No known API applications configured in %s.'), | ||||
|                       paste_config_file) | ||||
|         return | ||||
|  | ||||
|   | ||||
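Service.create above falls back to flags or the executable name for every omitted argument. A sketch of how a nova-* entry point might build and run a service (the binary name is illustrative, and the FLAGS(sys.argv) parse follows gflags conventions rather than anything shown in this diff):

    import sys

    from nova import flags
    from nova import service

    if __name__ == '__main__':
        flags.FLAGS(sys.argv)   # gflags-style argument parsing (assumption)
        server = service.Service.create(binary='nova-compute', topic='compute')
        service.serve(server)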
							
								
								
									
nova/test.py (50 lines changed)
							| @@ -16,12 +16,12 @@ | ||||
| #    License for the specific language governing permissions and limitations | ||||
| #    under the License. | ||||
|  | ||||
| """ | ||||
| Base classes for our unit tests. | ||||
| Allows overriding of flags for use of fakes, | ||||
| and some black magic for inline callbacks. | ||||
| """ | ||||
| """Base classes for our unit tests. | ||||
|  | ||||
| Allows overriding of flags for use of fakes, and some black magic for | ||||
| inline callbacks. | ||||
|  | ||||
| """ | ||||
|  | ||||
| import datetime | ||||
| import functools | ||||
| @@ -52,9 +52,9 @@ flags.DEFINE_bool('fake_tests', True, | ||||
|  | ||||
|  | ||||
| def skip_if_fake(func): | ||||
|     """Decorator that skips a test if running in fake mode""" | ||||
|     """Decorator that skips a test if running in fake mode.""" | ||||
|     def _skipper(*args, **kw): | ||||
|         """Wrapped skipper function""" | ||||
|         """Wrapped skipper function.""" | ||||
|         if FLAGS.fake_tests: | ||||
|             raise unittest.SkipTest('Test cannot be run in fake mode') | ||||
|         else: | ||||
| @@ -63,9 +63,10 @@ def skip_if_fake(func): | ||||
|  | ||||
|  | ||||
| class TestCase(unittest.TestCase): | ||||
|     """Test case base class for all unit tests""" | ||||
|     """Test case base class for all unit tests.""" | ||||
|  | ||||
|     def setUp(self): | ||||
|         """Run before each test method to initialize test environment""" | ||||
|         """Run before each test method to initialize test environment.""" | ||||
|         super(TestCase, self).setUp() | ||||
|         # NOTE(vish): We need a better method for creating fixtures for tests | ||||
|         #             now that we have some required db setup for the system | ||||
| @@ -86,8 +87,7 @@ class TestCase(unittest.TestCase): | ||||
|         self._original_flags = FLAGS.FlagValuesDict() | ||||
|  | ||||
|     def tearDown(self): | ||||
|         """Runs after each test method to finalize/tear down test | ||||
|         environment.""" | ||||
|         """Runs after each test method to tear down test environment.""" | ||||
|         try: | ||||
|             self.mox.UnsetStubs() | ||||
|             self.stubs.UnsetAll() | ||||
| @@ -121,7 +121,7 @@ class TestCase(unittest.TestCase): | ||||
|                     pass | ||||
|  | ||||
|     def flags(self, **kw): | ||||
|         """Override flag variables for a test""" | ||||
|         """Override flag variables for a test.""" | ||||
|         for k, v in kw.iteritems(): | ||||
|             if k in self.flag_overrides: | ||||
|                 self.reset_flags() | ||||
| @@ -131,7 +131,11 @@ class TestCase(unittest.TestCase): | ||||
|             setattr(FLAGS, k, v) | ||||
|  | ||||
|     def reset_flags(self): | ||||
|         """Resets all flag variables for the test.  Runs after each test""" | ||||
|         """Resets all flag variables for the test. | ||||
|  | ||||
|         Runs after each test. | ||||
|  | ||||
|         """ | ||||
|         FLAGS.Reset() | ||||
|         for k, v in self._original_flags.iteritems(): | ||||
|             setattr(FLAGS, k, v) | ||||
| @@ -158,7 +162,6 @@ class TestCase(unittest.TestCase): | ||||
|  | ||||
|     def _monkey_patch_wsgi(self): | ||||
|         """Allow us to kill servers spawned by wsgi.Server.""" | ||||
|         # TODO(termie): change these patterns to use functools | ||||
|         self.original_start = wsgi.Server.start | ||||
|  | ||||
|         @functools.wraps(self.original_start) | ||||
| @@ -189,12 +192,13 @@ class TestCase(unittest.TestCase): | ||||
|             If you don't care (or don't know) a given value, you can specify | ||||
|             the string DONTCARE as the value. This will cause that dict-item | ||||
|             to be skipped. | ||||
|  | ||||
|         """ | ||||
|         def raise_assertion(msg): | ||||
|             d1str = str(d1) | ||||
|             d2str = str(d2) | ||||
|             base_msg = ("Dictionaries do not match. %(msg)s d1: %(d1str)s " | ||||
|                         "d2: %(d2str)s" % locals()) | ||||
|             base_msg = ('Dictionaries do not match. %(msg)s d1: %(d1str)s ' | ||||
|                         'd2: %(d2str)s' % locals()) | ||||
|             raise AssertionError(base_msg) | ||||
|  | ||||
|         d1keys = set(d1.keys()) | ||||
| @@ -202,8 +206,8 @@ class TestCase(unittest.TestCase): | ||||
|         if d1keys != d2keys: | ||||
|             d1only = d1keys - d2keys | ||||
|             d2only = d2keys - d1keys | ||||
|             raise_assertion("Keys in d1 and not d2: %(d1only)s. " | ||||
|                             "Keys in d2 and not d1: %(d2only)s" % locals()) | ||||
|             raise_assertion('Keys in d1 and not d2: %(d1only)s. ' | ||||
|                             'Keys in d2 and not d1: %(d2only)s' % locals()) | ||||
|  | ||||
|         for key in d1keys: | ||||
|             d1value = d1[key] | ||||
| @@ -217,19 +221,19 @@ class TestCase(unittest.TestCase): | ||||
|                                 "d2['%(key)s']=%(d2value)s" % locals()) | ||||
|  | ||||
|     def assertDictListMatch(self, L1, L2): | ||||
|         """Assert a list of dicts are equivalent""" | ||||
|         """Assert a list of dicts are equivalent.""" | ||||
|         def raise_assertion(msg): | ||||
|             L1str = str(L1) | ||||
|             L2str = str(L2) | ||||
|             base_msg = ("List of dictionaries do not match: %(msg)s " | ||||
|                         "L1: %(L1str)s L2: %(L2str)s" % locals()) | ||||
|             base_msg = ('List of dictionaries do not match: %(msg)s ' | ||||
|                         'L1: %(L1str)s L2: %(L2str)s' % locals()) | ||||
|             raise AssertionError(base_msg) | ||||
|  | ||||
|         L1count = len(L1) | ||||
|         L2count = len(L2) | ||||
|         if L1count != L2count: | ||||
|             raise_assertion("Length mismatch: len(L1)=%(L1count)d != " | ||||
|                             "len(L2)=%(L2count)d" % locals()) | ||||
|             raise_assertion('Length mismatch: len(L1)=%(L1count)d != ' | ||||
|                             'len(L2)=%(L2count)d' % locals()) | ||||
|  | ||||
|         for d1, d2 in zip(L1, L2): | ||||
|             self.assertDictMatch(d1, d2) | ||||
|   | ||||
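TestCase.flags overrides flag values for a single test and reset_flags restores the originals afterwards. A minimal sketch using the fake_tests flag defined earlier in this file:

    from nova import test


    class ExampleFlagTest(test.TestCase):

        def test_flag_override(self):
            # The override lasts only for this test; reset_flags restores it.
            self.flags(fake_tests=False)
            self.assertFalse(test.FLAGS.fake_tests)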
							
								
								
									
nova/utils.py (147 lines changed)
							| @@ -17,9 +17,7 @@ | ||||
| #    License for the specific language governing permissions and limitations | ||||
| #    under the License. | ||||
|  | ||||
| """ | ||||
| System-level utilities and helper functions. | ||||
| """ | ||||
| """Utilities and helper functions.""" | ||||
|  | ||||
| import base64 | ||||
| import datetime | ||||
| @@ -43,9 +41,8 @@ from eventlet import event | ||||
| from eventlet import greenthread | ||||
| from eventlet import semaphore | ||||
| from eventlet.green import subprocess | ||||
| None | ||||
|  | ||||
| from nova import exception | ||||
| from nova.exception import ProcessExecutionError | ||||
| from nova import flags | ||||
| from nova import log as logging | ||||
|  | ||||
| @@ -56,7 +53,7 @@ FLAGS = flags.FLAGS | ||||
|  | ||||
|  | ||||
| def import_class(import_str): | ||||
|     """Returns a class from a string including module and class""" | ||||
|     """Returns a class from a string including module and class.""" | ||||
|     mod_str, _sep, class_str = import_str.rpartition('.') | ||||
|     try: | ||||
|         __import__(mod_str) | ||||
| @@ -67,7 +64,7 @@ def import_class(import_str): | ||||
|  | ||||
|  | ||||
| def import_object(import_str): | ||||
|     """Returns an object including a module or module and class""" | ||||
|     """Returns an object including a module or module and class.""" | ||||
|     try: | ||||
|         __import__(import_str) | ||||
|         return sys.modules[import_str] | ||||
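A hedged usage sketch for the two import helpers above, using stdlib paths so it runs anywhere a nova checkout is importable:

    from nova import utils

    # import_class resolves 'module.ClassName' and returns the class object.
    dt_cls = utils.import_class('datetime.datetime')
    assert dt_cls.__name__ == 'datetime'

    # import_object with a plain module path returns the imported module.
    path_mod = utils.import_object('os.path')
    assert hasattr(path_mod, 'join')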
| @@ -99,11 +96,12 @@ def vpn_ping(address, port, timeout=0.05, session_id=None): | ||||
|     cli_id = 64 bit identifier | ||||
|     ? = unknown, probably flags/padding | ||||
|     bit 9 was 1 and the rest were 0 in testing | ||||
|  | ||||
|     """ | ||||
|     if session_id is None: | ||||
|         session_id = random.randint(0, 0xffffffffffffffff) | ||||
|     sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) | ||||
|     data = struct.pack("!BQxxxxxx", 0x38, session_id) | ||||
|     data = struct.pack('!BQxxxxxx', 0x38, session_id) | ||||
|     sock.sendto(data, (address, port)) | ||||
|     sock.settimeout(timeout) | ||||
|     try: | ||||
| @@ -112,7 +110,7 @@ def vpn_ping(address, port, timeout=0.05, session_id=None): | ||||
|         return False | ||||
|     finally: | ||||
|         sock.close() | ||||
|     fmt = "!BQxxxxxQxxxx" | ||||
|     fmt = '!BQxxxxxQxxxx' | ||||
|     if len(received) != struct.calcsize(fmt): | ||||
|         print struct.calcsize(fmt) | ||||
|         return False | ||||
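A standalone worked example of the packet layout used above; the session id is an arbitrary illustrative value:

    import struct

    session_id = 0x1122334455667788
    # Probe: 1-byte type (0x38), 8-byte client session id, 6 padding bytes.
    probe = struct.pack('!BQxxxxxx', 0x38, session_id)
    assert len(probe) == 15

    # The expected reply uses the layout checked below: 26 bytes in total.
    reply_fmt = '!BQxxxxxQxxxx'
    assert struct.calcsize(reply_fmt) == 26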
| @@ -122,15 +120,8 @@ def vpn_ping(address, port, timeout=0.05, session_id=None): | ||||
|  | ||||
|  | ||||
| def fetchfile(url, target): | ||||
|     LOG.debug(_("Fetching %s") % url) | ||||
| #    c = pycurl.Curl() | ||||
| #    fp = open(target, "wb") | ||||
| #    c.setopt(c.URL, url) | ||||
| #    c.setopt(c.WRITEDATA, fp) | ||||
| #    c.perform() | ||||
| #    c.close() | ||||
| #    fp.close() | ||||
|     execute("curl", "--fail", url, "-o", target) | ||||
|     LOG.debug(_('Fetching %s') % url) | ||||
|     execute('curl', '--fail', url, '-o', target) | ||||
|  | ||||
|  | ||||
| def execute(*cmd, **kwargs): | ||||
| @@ -147,7 +138,7 @@ def execute(*cmd, **kwargs): | ||||
|     while attempts > 0: | ||||
|         attempts -= 1 | ||||
|         try: | ||||
|             LOG.debug(_("Running cmd (subprocess): %s"), ' '.join(cmd)) | ||||
|             LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd)) | ||||
|             env = os.environ.copy() | ||||
|             if addl_env: | ||||
|                 env.update(addl_env) | ||||
| @@ -163,20 +154,21 @@ def execute(*cmd, **kwargs): | ||||
|                 result = obj.communicate() | ||||
|             obj.stdin.close() | ||||
|             if obj.returncode: | ||||
|                 LOG.debug(_("Result was %s") % obj.returncode) | ||||
|                 LOG.debug(_('Result was %s') % obj.returncode) | ||||
|                 if type(check_exit_code) == types.IntType \ | ||||
|                         and obj.returncode != check_exit_code: | ||||
|                     (stdout, stderr) = result | ||||
|                     raise ProcessExecutionError(exit_code=obj.returncode, | ||||
|                                                 stdout=stdout, | ||||
|                                                 stderr=stderr, | ||||
|                                                 cmd=' '.join(cmd)) | ||||
|                     raise exception.ProcessExecutionError( | ||||
|                             exit_code=obj.returncode, | ||||
|                             stdout=stdout, | ||||
|                             stderr=stderr, | ||||
|                             cmd=' '.join(cmd)) | ||||
|             return result | ||||
|         except ProcessExecutionError: | ||||
|         except exception.ProcessExecutionError: | ||||
|             if not attempts: | ||||
|                 raise | ||||
|             else: | ||||
|                 LOG.debug(_("%r failed. Retrying."), cmd) | ||||
|                 LOG.debug(_('%r failed. Retrying.'), cmd) | ||||
|                 if delay_on_retry: | ||||
|                     greenthread.sleep(random.randint(20, 200) / 100.0) | ||||
|         finally: | ||||
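A hedged usage sketch for execute(); the commands are illustrative, and the keyword arguments shown are the ones handled in the body above (check_exit_code, attempts, delay_on_retry):

    from nova import utils

    # A non-zero exit raises ProcessExecutionError, per the handling above.
    out, err = utils.execute('cat', '/etc/hostname')

    # Tolerate a non-zero exit code.
    out, err = utils.execute('ls', '/nonexistent', check_exit_code=False)

    # Retry a flaky command a few times, sleeping briefly between attempts.
    out, err = utils.execute('curl', '--fail', 'http://example.com',
                             attempts=3, delay_on_retry=True)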
| @@ -188,13 +180,13 @@ def execute(*cmd, **kwargs): | ||||
|  | ||||
| def ssh_execute(ssh, cmd, process_input=None, | ||||
|                 addl_env=None, check_exit_code=True): | ||||
|     LOG.debug(_("Running cmd (SSH): %s"), ' '.join(cmd)) | ||||
|     LOG.debug(_('Running cmd (SSH): %s'), ' '.join(cmd)) | ||||
|     if addl_env: | ||||
|         raise exception.Error("Environment not supported over SSH") | ||||
|         raise exception.Error('Environment not supported over SSH') | ||||
|  | ||||
|     if process_input: | ||||
|         # This is (probably) fixable if we need it... | ||||
|         raise exception.Error("process_input not supported over SSH") | ||||
|         raise exception.Error('process_input not supported over SSH') | ||||
|  | ||||
|     stdin_stream, stdout_stream, stderr_stream = ssh.exec_command(cmd) | ||||
|     channel = stdout_stream.channel | ||||
| @@ -212,7 +204,7 @@ def ssh_execute(ssh, cmd, process_input=None, | ||||
|  | ||||
|     # exit_status == -1 if no exit code was returned | ||||
|     if exit_status != -1: | ||||
|         LOG.debug(_("Result was %s") % exit_status) | ||||
|         LOG.debug(_('Result was %s') % exit_status) | ||||
|         if check_exit_code and exit_status != 0: | ||||
|             raise exception.ProcessExecutionError(exit_code=exit_status, | ||||
|                                                   stdout=stdout, | ||||
| @@ -251,7 +243,7 @@ def debug(arg): | ||||
|  | ||||
|  | ||||
| def runthis(prompt, *cmd, **kwargs): | ||||
|     LOG.debug(_("Running %s"), (" ".join(cmd))) | ||||
|     LOG.debug(_('Running %s'), (' '.join(cmd))) | ||||
|     rv, err = execute(*cmd, **kwargs) | ||||
|  | ||||
|  | ||||
| @@ -266,48 +258,49 @@ def generate_mac(): | ||||
|            random.randint(0x00, 0x7f), | ||||
|            random.randint(0x00, 0xff), | ||||
|            random.randint(0x00, 0xff)] | ||||
|     return ':'.join(map(lambda x: "%02x" % x, mac)) | ||||
|     return ':'.join(map(lambda x: '%02x' % x, mac)) | ||||
|  | ||||
|  | ||||
| # Default symbols to use for passwords. Avoids visually confusing characters. | ||||
| # ~6 bits per symbol | ||||
| DEFAULT_PASSWORD_SYMBOLS = ("23456789"  # Removed: 0,1 | ||||
|                             "ABCDEFGHJKLMNPQRSTUVWXYZ"  # Removed: I, O | ||||
|                             "abcdefghijkmnopqrstuvwxyz")  # Removed: l | ||||
| DEFAULT_PASSWORD_SYMBOLS = ('23456789'  # Removed: 0,1 | ||||
|                             'ABCDEFGHJKLMNPQRSTUVWXYZ'  # Removed: I, O | ||||
|                             'abcdefghijkmnopqrstuvwxyz')  # Removed: l | ||||
|  | ||||
|  | ||||
| # ~5 bits per symbol | ||||
| EASIER_PASSWORD_SYMBOLS = ("23456789"  # Removed: 0, 1 | ||||
|                            "ABCDEFGHJKLMNPQRSTUVWXYZ")  # Removed: I, O | ||||
| EASIER_PASSWORD_SYMBOLS = ('23456789'  # Removed: 0, 1 | ||||
|                            'ABCDEFGHJKLMNPQRSTUVWXYZ')  # Removed: I, O | ||||
|  | ||||
|  | ||||
| def generate_password(length=20, symbols=DEFAULT_PASSWORD_SYMBOLS): | ||||
|     """Generate a random password from the supplied symbols. | ||||
|  | ||||
|     Believed to be reasonably secure (with a reasonable password length!) | ||||
|  | ||||
|     """ | ||||
|     r = random.SystemRandom() | ||||
|     return "".join([r.choice(symbols) for _i in xrange(length)]) | ||||
|     return ''.join([r.choice(symbols) for _i in xrange(length)]) | ||||
|  | ||||
|  | ||||
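The same recipe in a standalone form, using the default symbol set defined above:

    import random

    symbols = ('23456789'
               'ABCDEFGHJKLMNPQRSTUVWXYZ'
               'abcdefghijkmnopqrstuvwxyz')
    rng = random.SystemRandom()   # OS-provided randomness source
    password = ''.join(rng.choice(symbols) for _ in range(20))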
| def last_octet(address): | ||||
|     return int(address.split(".")[-1]) | ||||
|     return int(address.split('.')[-1]) | ||||
|  | ||||
|  | ||||
| def  get_my_linklocal(interface): | ||||
|     try: | ||||
|         if_str = execute("ip", "-f", "inet6", "-o", "addr", "show", interface) | ||||
|         condition = "\s+inet6\s+([0-9a-f:]+)/\d+\s+scope\s+link" | ||||
|         if_str = execute('ip', '-f', 'inet6', '-o', 'addr', 'show', interface) | ||||
|         condition = '\s+inet6\s+([0-9a-f:]+)/\d+\s+scope\s+link' | ||||
|         links = [re.search(condition, x) for x in if_str[0].split('\n')] | ||||
|         address = [w.group(1) for w in links if w is not None] | ||||
|         if address[0] is not None: | ||||
|             return address[0] | ||||
|         else: | ||||
|             raise exception.Error(_("Link Local address is not found.:%s") | ||||
|             raise exception.Error(_('Link Local address is not found.:%s') | ||||
|                                   % if_str) | ||||
|     except Exception as ex: | ||||
|         raise exception.Error(_("Couldn't get Link Local IP of %(interface)s" | ||||
|                 " :%(ex)s") % locals()) | ||||
|                                 " :%(ex)s") % locals()) | ||||
|  | ||||
|  | ||||
| def to_global_ipv6(prefix, mac): | ||||
| @@ -319,15 +312,15 @@ def to_global_ipv6(prefix, mac): | ||||
|         return (mac64_addr ^ netaddr.IPAddress('::0200:0:0:0') | maskIP).\ | ||||
|                                                                     format() | ||||
|     except TypeError: | ||||
|         raise TypeError(_("Bad mac for to_global_ipv6: %s") % mac) | ||||
|         raise TypeError(_('Bad mac for to_global_ipv6: %s') % mac) | ||||
|  | ||||
|  | ||||
| def to_mac(ipv6_address): | ||||
|     address = netaddr.IPAddress(ipv6_address) | ||||
|     mask1 = netaddr.IPAddress("::ffff:ffff:ffff:ffff") | ||||
|     mask2 = netaddr.IPAddress("::0200:0:0:0") | ||||
|     mask1 = netaddr.IPAddress('::ffff:ffff:ffff:ffff') | ||||
|     mask2 = netaddr.IPAddress('::0200:0:0:0') | ||||
|     mac64 = netaddr.EUI(int(address & mask1 ^ mask2)).words | ||||
|     return ":".join(["%02x" % i for i in mac64[0:3] + mac64[5:8]]) | ||||
|     return ':'.join(['%02x' % i for i in mac64[0:3] + mac64[5:8]]) | ||||
|  | ||||
|  | ||||
| def utcnow(): | ||||
| @@ -341,7 +334,7 @@ utcnow.override_time = None | ||||
|  | ||||
|  | ||||
| def is_older_than(before, seconds): | ||||
|     """Return True if before is older than seconds""" | ||||
|     """Return True if before is older than seconds.""" | ||||
|     return utcnow() - before > datetime.timedelta(seconds=seconds) | ||||
|  | ||||
|  | ||||
| @@ -379,7 +372,7 @@ def isotime(at=None): | ||||
|  | ||||
|  | ||||
| def parse_isotime(timestr): | ||||
|     """Turn an iso formatted time back into a datetime""" | ||||
|     """Turn an iso formatted time back into a datetime.""" | ||||
|     return datetime.datetime.strptime(timestr, TIME_FORMAT) | ||||
|  | ||||
|  | ||||
| @@ -433,16 +426,19 @@ class LazyPluggable(object): | ||||
|  | ||||
|  | ||||
| class LoopingCallDone(Exception): | ||||
|     """The poll-function passed to LoopingCall can raise this exception to | ||||
|     """Exception to break out and stop a LoopingCall. | ||||
|  | ||||
|     The poll-function passed to LoopingCall can raise this exception to | ||||
|     break out of the loop normally. This is somewhat analogous to | ||||
|     StopIteration. | ||||
|  | ||||
|     An optional return-value can be included as the argument to the exception; | ||||
|     this return-value will be returned by LoopingCall.wait() | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, retvalue=True): | ||||
|         """:param retvalue: Value that LoopingCall.wait() should return""" | ||||
|         """:param retvalue: Value that LoopingCall.wait() should return.""" | ||||
|         self.retvalue = retvalue | ||||
|  | ||||
|  | ||||
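A hypothetical poll function illustrating the contract described above; `task` and its attributes are invented for the sketch:

    from nova import utils

    def _poll(task):
        # Raising LoopingCallDone ends the loop cleanly; the value passed in
        # is what LoopingCall.wait() returns to the caller.
        if task.done:                        # hypothetical attribute
            raise utils.LoopingCallDone(retvalue=task.result)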
| @@ -493,7 +489,7 @@ def xhtml_escape(value): | ||||
|     http://github.com/facebook/tornado/blob/master/tornado/escape.py | ||||
|  | ||||
|     """ | ||||
|     return saxutils.escape(value, {'"': "&quot;"}) | ||||
|     return saxutils.escape(value, {'"': '&quot;'}) | ||||
|  | ||||
|  | ||||
| def utf8(value): | ||||
| @@ -504,7 +500,7 @@ def utf8(value): | ||||
|  | ||||
|     """ | ||||
|     if isinstance(value, unicode): | ||||
|         return value.encode("utf-8") | ||||
|         return value.encode('utf-8') | ||||
|     assert isinstance(value, str) | ||||
|     return value | ||||
|  | ||||
| @@ -554,7 +550,7 @@ class _NoopContextManager(object): | ||||
|  | ||||
|  | ||||
| def synchronized(name, external=False): | ||||
|     """Synchronization decorator | ||||
|     """Synchronization decorator. | ||||
|  | ||||
|     Decorating a method like so: | ||||
|     @synchronized('mylock') | ||||
| @@ -578,6 +574,7 @@ def synchronized(name, external=False): | ||||
|     multiple processes. This means that if two different workers both run a | ||||
|     a method decorated with @synchronized('mylock', external=True), only one | ||||
|     of them will execute at a time. | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def wrap(f): | ||||
| @@ -590,13 +587,13 @@ def synchronized(name, external=False): | ||||
|                 _semaphores[name] = semaphore.Semaphore() | ||||
|             sem = _semaphores[name] | ||||
|             LOG.debug(_('Attempting to grab semaphore "%(lock)s" for method ' | ||||
|                       '"%(method)s"...' % {"lock": name, | ||||
|                                            "method": f.__name__})) | ||||
|                         '"%(method)s"...' % {'lock': name, | ||||
|                                              'method': f.__name__})) | ||||
|             with sem: | ||||
|                 if external: | ||||
|                     LOG.debug(_('Attempting to grab file lock "%(lock)s" for ' | ||||
|                                 'method "%(method)s"...' % | ||||
|                                 {"lock": name, "method": f.__name__})) | ||||
|                                 {'lock': name, 'method': f.__name__})) | ||||
|                     lock_file_path = os.path.join(FLAGS.lock_path, | ||||
|                                                   'nova-%s.lock' % name) | ||||
|                     lock = lockfile.FileLock(lock_file_path) | ||||
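A simplified, in-process-only sketch of the pattern above (one shared semaphore per lock name); it omits the external file-lock branch and is not the module's implementation:

    import functools
    from eventlet import semaphore

    _locks = {}

    def synchronized_sketch(name):
        def wrap(f):
            @functools.wraps(f)
            def inner(*args, **kwargs):
                # One semaphore per lock name, shared by every decorated
                # function in the same process.
                sem = _locks.setdefault(name, semaphore.Semaphore())
                with sem:
                    return f(*args, **kwargs)
            return inner
        return wrap

    @synchronized_sketch('mylock')
    def bar():
        return 'runs serially with other holders of "mylock"'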
| @@ -617,21 +614,23 @@ def synchronized(name, external=False): | ||||
|  | ||||
|  | ||||
| def get_from_path(items, path): | ||||
|     """ Returns a list of items matching the specified path.  Takes an | ||||
|     XPath-like expression e.g. prop1/prop2/prop3, and for each item in items, | ||||
|     looks up items[prop1][prop2][prop3].  Like XPath, if any of the | ||||
|     """Returns a list of items matching the specified path. | ||||
|  | ||||
|     Takes an XPath-like expression e.g. prop1/prop2/prop3, and for each item | ||||
|     in items, looks up items[prop1][prop2][prop3].  Like XPath, if any of the | ||||
|     intermediate results are lists it will treat each list item individually. | ||||
|     A 'None' in items or any child expressions will be ignored, this function | ||||
|     will not throw because of None (anywhere) in items.  The returned list | ||||
|     will contain no None values.""" | ||||
|     will contain no None values. | ||||
|  | ||||
|     """ | ||||
|     if path is None: | ||||
|         raise exception.Error("Invalid mini_xpath") | ||||
|         raise exception.Error('Invalid mini_xpath') | ||||
|  | ||||
|     (first_token, sep, remainder) = path.partition("/") | ||||
|     (first_token, sep, remainder) = path.partition('/') | ||||
|  | ||||
|     if first_token == "": | ||||
|         raise exception.Error("Invalid mini_xpath") | ||||
|     if first_token == '': | ||||
|         raise exception.Error('Invalid mini_xpath') | ||||
|  | ||||
|     results = [] | ||||
|  | ||||
| @@ -645,7 +644,7 @@ def get_from_path(items, path): | ||||
|     for item in items: | ||||
|         if item is None: | ||||
|             continue | ||||
|         get_method = getattr(item, "get", None) | ||||
|         get_method = getattr(item, 'get', None) | ||||
|         if get_method is None: | ||||
|             continue | ||||
|         child = get_method(first_token) | ||||
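A hedged behavior sketch based on the docstring above; the expected result is inferred from the docstring, not captured from a run:

    from nova import utils

    items = [{'a': {'b': {'c': 1}}},
             {'a': {'b': {'c': 2}}},
             {'a': None}]
    # Intermediate Nones are skipped and never appear in the result.
    assert utils.get_from_path(items, 'a/b/c') == [1, 2]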
| @@ -666,7 +665,7 @@ def get_from_path(items, path): | ||||
|  | ||||
|  | ||||
| def flatten_dict(dict_, flattened=None): | ||||
|     """Recursively flatten a nested dictionary""" | ||||
|     """Recursively flatten a nested dictionary.""" | ||||
|     flattened = flattened or {} | ||||
|     for key, value in dict_.iteritems(): | ||||
|         if hasattr(value, 'iteritems'): | ||||
| @@ -677,9 +676,7 @@ def flatten_dict(dict_, flattened=None): | ||||
|  | ||||
|  | ||||
| def partition_dict(dict_, keys): | ||||
|     """Return two dicts, one containing only `keys` the other containing | ||||
|     everything but `keys` | ||||
|     """ | ||||
|     """Return two dicts, one with `keys` the other with everything else.""" | ||||
|     intersection = {} | ||||
|     difference = {} | ||||
|     for key, value in dict_.iteritems(): | ||||
| @@ -691,9 +688,7 @@ def partition_dict(dict_, keys): | ||||
|  | ||||
|  | ||||
| def map_dict_keys(dict_, key_map): | ||||
|     """Return a dictionary in which the dictionaries keys are mapped to | ||||
|     new keys. | ||||
|     """ | ||||
|     """Return a dict in which the dictionaries keys are mapped to new keys.""" | ||||
|     mapped = {} | ||||
|     for key, value in dict_.iteritems(): | ||||
|         mapped_key = key_map[key] if key in key_map else key | ||||
| @@ -702,15 +697,15 @@ def map_dict_keys(dict_, key_map): | ||||
|  | ||||
|  | ||||
| def subset_dict(dict_, keys): | ||||
|     """Return a dict that only contains a subset of keys""" | ||||
|     """Return a dict that only contains a subset of keys.""" | ||||
|     subset = partition_dict(dict_, keys)[0] | ||||
|     return subset | ||||
|  | ||||
|  | ||||
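Expected behavior for the three dictionary helpers above, inferred from their docstrings (a hedged sketch, assuming a nova checkout is importable):

    from nova import utils

    d = {'a': 1, 'b': 2, 'c': 3}
    assert utils.partition_dict(d, ['a']) == ({'a': 1}, {'b': 2, 'c': 3})
    assert utils.map_dict_keys(d, {'a': 'x'}) == {'x': 1, 'b': 2, 'c': 3}
    assert utils.subset_dict(d, ['a', 'b']) == {'a': 1, 'b': 2}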
| def check_isinstance(obj, cls): | ||||
|     """Checks that obj is of type cls, and lets PyLint infer types""" | ||||
|     """Checks that obj is of type cls, and lets PyLint infer types.""" | ||||
|     if isinstance(obj, cls): | ||||
|         return obj | ||||
|     raise Exception(_("Expected object of type: %s") % (str(cls))) | ||||
|     raise Exception(_('Expected object of type: %s') % (str(cls))) | ||||
|     # TODO(justinsb): Can we make this better?? | ||||
|     return cls()  # Ugly PyLint hack | ||||
|   | ||||
							
								
								
									
nova/wsgi.py (144 changes)
							| @@ -17,9 +17,7 @@ | ||||
| #    License for the specific language governing permissions and limitations | ||||
| #    under the License. | ||||
|  | ||||
| """ | ||||
| Utility methods for working with WSGI servers | ||||
| """ | ||||
| """Utility methods for working with WSGI servers.""" | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| @@ -33,7 +31,6 @@ import routes.middleware | ||||
| import webob | ||||
| import webob.dec | ||||
| import webob.exc | ||||
|  | ||||
| from paste import deploy | ||||
|  | ||||
| from nova import exception | ||||
| @@ -66,7 +63,7 @@ class Server(object): | ||||
|     def start(self, application, port, host='0.0.0.0', backlog=128): | ||||
|         """Run a WSGI server with the given application.""" | ||||
|         arg0 = sys.argv[0] | ||||
|         logging.audit(_("Starting %(arg0)s on %(host)s:%(port)s") % locals()) | ||||
|         logging.audit(_('Starting %(arg0)s on %(host)s:%(port)s') % locals()) | ||||
|         socket = eventlet.listen((host, port), backlog=backlog) | ||||
|         self.pool.spawn_n(self._run, application, socket) | ||||
|  | ||||
| @@ -87,30 +84,31 @@ class Server(object): | ||||
| class Request(webob.Request): | ||||
|  | ||||
|     def best_match_content_type(self): | ||||
|         """ | ||||
|         Determine the most acceptable content-type based on the | ||||
|         query extension then the Accept header | ||||
|         """Determine the most acceptable content-type. | ||||
|  | ||||
|         Based on the query extension then the Accept header. | ||||
|  | ||||
|         """ | ||||
|  | ||||
|         parts = self.path.rsplit(".", 1) | ||||
|         parts = self.path.rsplit('.', 1) | ||||
|  | ||||
|         if len(parts) > 1: | ||||
|             format = parts[1] | ||||
|             if format in ["json", "xml"]: | ||||
|                 return "application/{0}".format(parts[1]) | ||||
|             if format in ['json', 'xml']: | ||||
|                 return 'application/{0}'.format(parts[1]) | ||||
|  | ||||
|         ctypes = ["application/json", "application/xml"] | ||||
|         ctypes = ['application/json', 'application/xml'] | ||||
|         bm = self.accept.best_match(ctypes) | ||||
|  | ||||
|         return bm or "application/json" | ||||
|         return bm or 'application/json' | ||||
|  | ||||
|     def get_content_type(self): | ||||
|         try: | ||||
|             ct = self.headers["Content-Type"] | ||||
|             assert ct in ("application/xml", "application/json") | ||||
|             ct = self.headers['Content-Type'] | ||||
|             assert ct in ('application/xml', 'application/json') | ||||
|             return ct | ||||
|         except Exception: | ||||
|             raise webob.exc.HTTPBadRequest("Invalid content type") | ||||
|             raise webob.exc.HTTPBadRequest('Invalid content type') | ||||
|  | ||||
|  | ||||
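A standalone sketch of the negotiation rule described in best_match_content_type: a recognized path extension wins, otherwise the Accept header's best match, otherwise JSON. The function and example paths are illustrative:

    def best_match_sketch(path, accept_best_match=None):
        parts = path.rsplit('.', 1)
        if len(parts) > 1 and parts[1] in ('json', 'xml'):
            return 'application/' + parts[1]
        return accept_best_match or 'application/json'

    assert best_match_sketch('/servers/detail.xml') == 'application/xml'
    assert best_match_sketch('/servers/detail',
                             'application/json') == 'application/json'
    assert best_match_sketch('/servers/detail') == 'application/json'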
| class Application(object): | ||||
| @@ -118,7 +116,7 @@ class Application(object): | ||||
|  | ||||
|     @classmethod | ||||
|     def factory(cls, global_config, **local_config): | ||||
|         """Used for paste app factories in paste.deploy config fles. | ||||
|         """Used for paste app factories in paste.deploy config files. | ||||
|  | ||||
|         Any local configuration (that is, values under the [app:APPNAME] | ||||
|         section of the paste config) will be passed into the `__init__` method | ||||
| @@ -173,8 +171,9 @@ class Application(object): | ||||
|  | ||||
|         See the end of http://pythonpaste.org/webob/modules/dec.html | ||||
|         for more info. | ||||
|  | ||||
|         """ | ||||
|         raise NotImplementedError(_("You must implement __call__")) | ||||
|         raise NotImplementedError(_('You must implement __call__')) | ||||
|  | ||||
|  | ||||
| class Middleware(Application): | ||||
| @@ -184,11 +183,12 @@ class Middleware(Application): | ||||
|     initialized that will be called next.  By default the middleware will | ||||
|     simply call its wrapped app, or you can override __call__ to customize its | ||||
|     behavior. | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     @classmethod | ||||
|     def factory(cls, global_config, **local_config): | ||||
|         """Used for paste app factories in paste.deploy config fles. | ||||
|         """Used for paste app factories in paste.deploy config files. | ||||
|  | ||||
|         Any local configuration (that is, values under the [filter:APPNAME] | ||||
|         section of the paste config) will be passed into the `__init__` method | ||||
| @@ -240,20 +240,24 @@ class Middleware(Application): | ||||
|  | ||||
|  | ||||
| class Debug(Middleware): | ||||
|     """Helper class that can be inserted into any WSGI application chain | ||||
|     to get information about the request and response.""" | ||||
|     """Helper class for debugging a WSGI application. | ||||
|  | ||||
|     Can be inserted into any WSGI application chain to get information | ||||
|     about the request and response. | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     @webob.dec.wsgify(RequestClass=Request) | ||||
|     def __call__(self, req): | ||||
|         print ("*" * 40) + " REQUEST ENVIRON" | ||||
|         print ('*' * 40) + ' REQUEST ENVIRON' | ||||
|         for key, value in req.environ.items(): | ||||
|             print key, "=", value | ||||
|             print key, '=', value | ||||
|         print | ||||
|         resp = req.get_response(self.application) | ||||
|  | ||||
|         print ("*" * 40) + " RESPONSE HEADERS" | ||||
|         print ('*' * 40) + ' RESPONSE HEADERS' | ||||
|         for (key, value) in resp.headers.iteritems(): | ||||
|             print key, "=", value | ||||
|             print key, '=', value | ||||
|         print | ||||
|  | ||||
|         resp.app_iter = self.print_generator(resp.app_iter) | ||||
| @@ -262,11 +266,8 @@ class Debug(Middleware): | ||||
|  | ||||
|     @staticmethod | ||||
|     def print_generator(app_iter): | ||||
|         """ | ||||
|         Iterator that prints the contents of a wrapper string iterator | ||||
|         when iterated. | ||||
|         """ | ||||
|         print ("*" * 40) + " BODY" | ||||
|         """Iterator that prints the contents of a wrapper string.""" | ||||
|         print ('*' * 40) + ' BODY' | ||||
|         for part in app_iter: | ||||
|             sys.stdout.write(part) | ||||
|             sys.stdout.flush() | ||||
| @@ -275,13 +276,10 @@ class Debug(Middleware): | ||||
|  | ||||
|  | ||||
| class Router(object): | ||||
|     """ | ||||
|     WSGI middleware that maps incoming requests to WSGI apps. | ||||
|     """ | ||||
|     """WSGI middleware that maps incoming requests to WSGI apps.""" | ||||
|  | ||||
|     def __init__(self, mapper): | ||||
|         """ | ||||
|         Create a router for the given routes.Mapper. | ||||
|         """Create a router for the given routes.Mapper. | ||||
|  | ||||
|         Each route in `mapper` must specify a 'controller', which is a | ||||
|         WSGI app to call.  You'll probably want to specify an 'action' as | ||||
| @@ -293,15 +291,16 @@ class Router(object): | ||||
|           sc = ServerController() | ||||
|  | ||||
|           # Explicit mapping of one route to a controller+action | ||||
|           mapper.connect(None, "/svrlist", controller=sc, action="list") | ||||
|           mapper.connect(None, '/svrlist', controller=sc, action='list') | ||||
|  | ||||
|           # Actions are all implicitly defined | ||||
|           mapper.resource("server", "servers", controller=sc) | ||||
|           mapper.resource('server', 'servers', controller=sc) | ||||
|  | ||||
|           # Pointing to an arbitrary WSGI app.  You can specify the | ||||
|           # {path_info:.*} parameter so the target app can be handed just that | ||||
|           # section of the URL. | ||||
|           mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp()) | ||||
|           mapper.connect(None, '/v1.0/{path_info:.*}', controller=BlogApp()) | ||||
|  | ||||
|         """ | ||||
|         self.map = mapper | ||||
|         self._router = routes.middleware.RoutesMiddleware(self._dispatch, | ||||
| @@ -309,19 +308,22 @@ class Router(object): | ||||
|  | ||||
|     @webob.dec.wsgify(RequestClass=Request) | ||||
|     def __call__(self, req): | ||||
|         """ | ||||
|         Route the incoming request to a controller based on self.map. | ||||
|         """Route the incoming request to a controller based on self.map. | ||||
|  | ||||
|         If no match, return a 404. | ||||
|  | ||||
|         """ | ||||
|         return self._router | ||||
|  | ||||
|     @staticmethod | ||||
|     @webob.dec.wsgify(RequestClass=Request) | ||||
|     def _dispatch(req): | ||||
|         """ | ||||
|         """Dispatch the request to the appropriate controller. | ||||
|  | ||||
|         Called by self._router after matching the incoming request to a route | ||||
|         and putting the information into req.environ.  Either returns 404 | ||||
|         or the routed WSGI app's response. | ||||
|  | ||||
|         """ | ||||
|         match = req.environ['wsgiorg.routing_args'][1] | ||||
|         if not match: | ||||
| @@ -331,19 +333,19 @@ class Router(object): | ||||
|  | ||||
|  | ||||
| class Controller(object): | ||||
|     """ | ||||
|     """WSGI app that dispatches to methods. | ||||
|  | ||||
|     WSGI app that reads routing information supplied by RoutesMiddleware | ||||
|     and calls the requested action method upon itself.  All action methods | ||||
|     must, in addition to their normal parameters, accept a 'req' argument | ||||
|     which is the incoming wsgi.Request.  They raise a webob.exc exception, | ||||
|     or return a dict which will be serialized by requested content type. | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     @webob.dec.wsgify(RequestClass=Request) | ||||
|     def __call__(self, req): | ||||
|         """ | ||||
|         Call the method specified in req.environ by RoutesMiddleware. | ||||
|         """ | ||||
|         """Call the method specified in req.environ by RoutesMiddleware.""" | ||||
|         arg_dict = req.environ['wsgiorg.routing_args'][1] | ||||
|         action = arg_dict['action'] | ||||
|         method = getattr(self, action) | ||||
| @@ -361,7 +363,7 @@ class Controller(object): | ||||
|             body = self._serialize(result, content_type, default_xmlns) | ||||
|  | ||||
|             response = webob.Response() | ||||
|             response.headers["Content-Type"] = content_type | ||||
|             response.headers['Content-Type'] = content_type | ||||
|             response.body = body | ||||
|             msg_dict = dict(url=req.url, status=response.status_int) | ||||
|             msg = _("%(url)s returned with HTTP %(status)d") % msg_dict | ||||
| @@ -371,12 +373,13 @@ class Controller(object): | ||||
|             return result | ||||
|  | ||||
|     def _serialize(self, data, content_type, default_xmlns): | ||||
|         """ | ||||
|         Serialize the given dict to the provided content_type. | ||||
|         """Serialize the given dict to the provided content_type. | ||||
|  | ||||
|         Uses self._serialization_metadata if it exists, which is a dict mapping | ||||
|         MIME types to information needed to serialize to that type. | ||||
|  | ||||
|         """ | ||||
|         _metadata = getattr(type(self), "_serialization_metadata", {}) | ||||
|         _metadata = getattr(type(self), '_serialization_metadata', {}) | ||||
|  | ||||
|         serializer = Serializer(_metadata, default_xmlns) | ||||
|         try: | ||||
| @@ -385,12 +388,13 @@ class Controller(object): | ||||
|             raise webob.exc.HTTPNotAcceptable() | ||||
|  | ||||
|     def _deserialize(self, data, content_type): | ||||
|         """ | ||||
|         Deserialize the request body to the specified content type. | ||||
|         """Deserialize the request body to the specified content type. | ||||
|  | ||||
|         Uses self._serialization_metadata if it exists, which is a dict mapping | ||||
|         MIME types to information needed to serialize to that type. | ||||
|  | ||||
|         """ | ||||
|         _metadata = getattr(type(self), "_serialization_metadata", {}) | ||||
|         _metadata = getattr(type(self), '_serialization_metadata', {}) | ||||
|         serializer = Serializer(_metadata) | ||||
|         return serializer.deserialize(data, content_type) | ||||
|  | ||||
| @@ -400,23 +404,22 @@ class Controller(object): | ||||
|  | ||||
|  | ||||
| class Serializer(object): | ||||
|     """ | ||||
|     Serializes and deserializes dictionaries to certain MIME types. | ||||
|     """ | ||||
|     """Serializes and deserializes dictionaries to certain MIME types.""" | ||||
|  | ||||
|     def __init__(self, metadata=None, default_xmlns=None): | ||||
|         """ | ||||
|         Create a serializer based on the given WSGI environment. | ||||
|         """Create a serializer based on the given WSGI environment. | ||||
|  | ||||
|         'metadata' is an optional dict mapping MIME types to information | ||||
|         needed to serialize a dictionary to that type. | ||||
|  | ||||
|         """ | ||||
|         self.metadata = metadata or {} | ||||
|         self.default_xmlns = default_xmlns | ||||
|  | ||||
|     def _get_serialize_handler(self, content_type): | ||||
|         handlers = { | ||||
|             "application/json": self._to_json, | ||||
|             "application/xml": self._to_xml, | ||||
|             'application/json': self._to_json, | ||||
|             'application/xml': self._to_xml, | ||||
|         } | ||||
|  | ||||
|         try: | ||||
| @@ -425,29 +428,27 @@ class Serializer(object): | ||||
|             raise exception.InvalidContentType() | ||||
|  | ||||
|     def serialize(self, data, content_type): | ||||
|         """ | ||||
|         Serialize a dictionary into a string of the specified content type. | ||||
|         """ | ||||
|         """Serialize a dictionary into the specified content type.""" | ||||
|         return self._get_serialize_handler(content_type)(data) | ||||
|  | ||||
|     def deserialize(self, datastring, content_type): | ||||
|         """ | ||||
|         Deserialize a string to a dictionary. | ||||
|         """Deserialize a string to a dictionary. | ||||
|  | ||||
|         The string must be in the format of a supported MIME type. | ||||
|  | ||||
|         """ | ||||
|         return self.get_deserialize_handler(content_type)(datastring) | ||||
|  | ||||
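A hedged round-trip sketch, assuming the JSON handlers are thin wrappers around the json module and a nova checkout is importable:

    from nova import wsgi

    serializer = wsgi.Serializer()
    body = serializer.serialize({'server': {'id': 1}}, 'application/json')
    data = serializer.deserialize(body, 'application/json')
    assert data == {'server': {'id': 1}}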
|     def get_deserialize_handler(self, content_type): | ||||
|         handlers = { | ||||
|             "application/json": self._from_json, | ||||
|             "application/xml": self._from_xml, | ||||
|             'application/json': self._from_json, | ||||
|             'application/xml': self._from_xml, | ||||
|         } | ||||
|  | ||||
|         try: | ||||
|             return handlers[content_type] | ||||
|         except Exception: | ||||
|             raise exception.InvalidContentType(_("Invalid content type %s" | ||||
|             raise exception.InvalidContentType(_('Invalid content type %s' | ||||
|                                                  % content_type)) | ||||
|  | ||||
|     def _from_json(self, datastring): | ||||
| @@ -460,11 +461,11 @@ class Serializer(object): | ||||
|         return {node.nodeName: self._from_xml_node(node, plurals)} | ||||
|  | ||||
|     def _from_xml_node(self, node, listnames): | ||||
|         """ | ||||
|         Convert a minidom node to a simple Python type. | ||||
|         """Convert a minidom node to a simple Python type. | ||||
|  | ||||
|         listnames is a collection of names of XML nodes whose subnodes should | ||||
|         be considered list items. | ||||
|  | ||||
|         """ | ||||
|         if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3: | ||||
|             return node.childNodes[0].nodeValue | ||||
| @@ -571,7 +572,6 @@ def paste_config_file(basename): | ||||
|     * /etc/nova, which may not be different from state_path on your distro | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     configfiles = [basename, | ||||
|                    os.path.join(FLAGS.state_path, 'etc', 'nova', basename), | ||||
|                    os.path.join(FLAGS.state_path, 'etc', basename), | ||||
| @@ -587,7 +587,7 @@ def load_paste_configuration(filename, appname): | ||||
|     filename = os.path.abspath(filename) | ||||
|     config = None | ||||
|     try: | ||||
|         config = deploy.appconfig("config:%s" % filename, name=appname) | ||||
|         config = deploy.appconfig('config:%s' % filename, name=appname) | ||||
|     except LookupError: | ||||
|         pass | ||||
|     return config | ||||
| @@ -598,7 +598,7 @@ def load_paste_app(filename, appname): | ||||
|     filename = os.path.abspath(filename) | ||||
|     app = None | ||||
|     try: | ||||
|         app = deploy.loadapp("config:%s" % filename, name=appname) | ||||
|         app = deploy.loadapp('config:%s' % filename, name=appname) | ||||
|     except LookupError: | ||||
|         pass | ||||
|     return app | ||||
|   | ||||
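A hedged sketch of how the paste helpers above are typically combined; the config basename and app name are illustrative, not taken from this diff:

    from nova import wsgi

    conf = wsgi.paste_config_file('api-paste.ini')   # illustrative basename
    if conf:
        app = wsgi.load_paste_app(conf, 'exampleapp')  # illustrative app name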