Browse Source

remove old openstack incubator project reference

the project was using the incubator oslo project. this patch set
will remove all these incubator project and use dependencies in
requirements.txt to indicate oslo requirements.

Change-Id: Ic8975f4ad37351e68ab0276db9781ac36f8ee645
Tong Li 3 years ago
parent
commit
207c762df6
34 changed files with 15 additions and 3766 deletions
  1. 0
    0
      kiloeyes/common/timeutils.py
  2. 1
    1
      kiloeyes/microservice/es_persister.py
  3. 1
    1
      kiloeyes/microservice/notification_engine.py
  4. 2
    2
      kiloeyes/microservice/server.py
  5. 1
    1
      kiloeyes/microservice/threshold_engine.py
  6. 1
    2
      kiloeyes/microservice/threshold_processor.py
  7. 0
    0
      kiloeyes/openstack/__init__.py
  8. 0
    17
      kiloeyes/openstack/common/__init__.py
  9. 0
    145
      kiloeyes/openstack/common/eventlet_backdoor.py
  10. 0
    113
      kiloeyes/openstack/common/excutils.py
  11. 0
    146
      kiloeyes/openstack/common/fileutils.py
  12. 0
    0
      kiloeyes/openstack/common/fixture/__init__.py
  13. 0
    85
      kiloeyes/openstack/common/fixture/config.py
  14. 0
    51
      kiloeyes/openstack/common/fixture/lockutils.py
  15. 0
    34
      kiloeyes/openstack/common/fixture/logging.py
  16. 0
    62
      kiloeyes/openstack/common/fixture/mockpatch.py
  17. 0
    43
      kiloeyes/openstack/common/fixture/moxstubout.py
  18. 0
    479
      kiloeyes/openstack/common/gettextutils.py
  19. 0
    73
      kiloeyes/openstack/common/importutils.py
  20. 0
    202
      kiloeyes/openstack/common/jsonutils.py
  21. 0
    45
      kiloeyes/openstack/common/local.py
  22. 0
    322
      kiloeyes/openstack/common/lockutils.py
  23. 0
    713
      kiloeyes/openstack/common/log.py
  24. 0
    147
      kiloeyes/openstack/common/loopingcall.py
  25. 0
    512
      kiloeyes/openstack/common/service.py
  26. 0
    311
      kiloeyes/openstack/common/strutils.py
  27. 0
    106
      kiloeyes/openstack/common/systemd.py
  28. 0
    147
      kiloeyes/openstack/common/threadgroup.py
  29. 2
    0
      kiloeyes/service.py
  30. 2
    2
      kiloeyes/tests/common/test_es_conn.py
  31. 2
    2
      kiloeyes/tests/microservice/test_strategy.py
  32. 1
    1
      kiloeyes/tests/microservice/test_threshold_processor.py
  33. 1
    1
      kiloeyes/v2/elasticsearch/metrics.py
  34. 1
    0
      requirements.txt

kiloeyes/openstack/common/timeutils.py → kiloeyes/common/timeutils.py View File


+ 1
- 1
kiloeyes/microservice/es_persister.py View File

@@ -15,12 +15,12 @@
15 15
 
16 16
 from oslo_config import cfg
17 17
 from oslo_log import log
18
+from oslo_service import service as os_service
18 19
 from stevedore import driver
19 20
 
20 21
 from kiloeyes.common import es_conn
21 22
 from kiloeyes.common import kafka_conn
22 23
 from kiloeyes.common import namespace
23
-from kiloeyes.openstack.common import service as os_service
24 24
 
25 25
 OPTS = [
26 26
     cfg.StrOpt('topic', default='metrics',

+ 1
- 1
kiloeyes/microservice/notification_engine.py View File

@@ -14,12 +14,12 @@
14 14
 
15 15
 from oslo_config import cfg
16 16
 from oslo_log import log
17
+from oslo_service import service as os_service
17 18
 from stevedore import driver
18 19
 
19 20
 from kiloeyes.common import es_conn
20 21
 from kiloeyes.common import kafka_conn
21 22
 from kiloeyes.common import namespace
22
-from kiloeyes.openstack.common import service as os_service
23 23
 
24 24
 NOTIFICATION_ENGINE_OPTS = [
25 25
     cfg.StrOpt('topic',

+ 2
- 2
kiloeyes/microservice/server.py View File

@@ -15,10 +15,10 @@
15 15
 
16 16
 from oslo_config import cfg
17 17
 from oslo_log import log
18
+from oslo_service import service as os_service
18 19
 from stevedore import driver
19 20
 
20 21
 from kiloeyes.common import namespace
21
-from kiloeyes.openstack.common import service as os_service
22 22
 from kiloeyes import service
23 23
 
24 24
 
@@ -40,7 +40,7 @@ def main():
40 40
                   'in the configuration file.')
41 41
         return None
42 42
 
43
-    launcher = os_service.ServiceLauncher()
43
+    launcher = os_service.ServiceLauncher(cfg.CONF)
44 44
 
45 45
     # Now load the micro service
46 46
     service_driver = driver.DriverManager(

+ 1
- 1
kiloeyes/microservice/threshold_engine.py View File

@@ -14,6 +14,7 @@
14 14
 
15 15
 from oslo_config import cfg
16 16
 from oslo_log import log
17
+from oslo_service import service as os_service
17 18
 from stevedore import driver
18 19
 import threading
19 20
 import time
@@ -21,7 +22,6 @@ import time
21 22
 from kiloeyes.common import es_conn
22 23
 from kiloeyes.common import kafka_conn
23 24
 from kiloeyes.common import namespace
24
-from kiloeyes.openstack.common import service as os_service
25 25
 
26 26
 lock = threading.RLock()
27 27
 

+ 1
- 2
kiloeyes/microservice/threshold_processor.py View File

@@ -20,8 +20,7 @@ import uuid
20 20
 
21 21
 from kiloeyes.common import alarm_expr_calculator as calculator
22 22
 from kiloeyes.common import alarm_expr_parser as parser
23
-from kiloeyes.openstack.common import timeutils as tu
24
-
23
+from kiloeyes.common import timeutils as tu
25 24
 
26 25
 LOG = log.getLogger(__name__)
27 26
 

+ 0
- 0
kiloeyes/openstack/__init__.py View File


+ 0
- 17
kiloeyes/openstack/common/__init__.py View File

@@ -1,17 +0,0 @@
1
-#
2
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
3
-#    not use this file except in compliance with the License. You may obtain
4
-#    a copy of the License at
5
-#
6
-#         http://www.apache.org/licenses/LICENSE-2.0
7
-#
8
-#    Unless required by applicable law or agreed to in writing, software
9
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
10
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
11
-#    License for the specific language governing permissions and limitations
12
-#    under the License.
13
-
14
-import six
15
-
16
-
17
-six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox'))

+ 0
- 145
kiloeyes/openstack/common/eventlet_backdoor.py View File

@@ -1,145 +0,0 @@
1
-# Copyright (c) 2012 OpenStack Foundation.
2
-# Administrator of the National Aeronautics and Space Administration.
3
-# All Rights Reserved.
4
-#
5
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
6
-#    not use this file except in compliance with the License. You may obtain
7
-#    a copy of the License at
8
-#
9
-#         http://www.apache.org/licenses/LICENSE-2.0
10
-#
11
-#    Unless required by applicable law or agreed to in writing, software
12
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
13
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
14
-#    License for the specific language governing permissions and limitations
15
-#    under the License.
16
-
17
-from __future__ import print_function
18
-
19
-import errno
20
-import gc
21
-import os
22
-import pprint
23
-import socket
24
-import sys
25
-import traceback
26
-
27
-import eventlet
28
-import eventlet.backdoor
29
-import greenlet
30
-from oslo_config import cfg
31
-
32
-from kiloeyes.openstack.common.gettextutils import _LI
33
-from oslo_log import log as logging
34
-
35
-help_for_backdoor_port = (
36
-    "Acceptable values are 0, <port>, and <start>:<end>, where 0 results "
37
-    "in listening on a random tcp port number; <port> results in listening "
38
-    "on the specified port number (and not enabling backdoor if that port "
39
-    "is in use); and <start>:<end> results in listening on the smallest "
40
-    "unused port number within the specified range of port numbers.  The "
41
-    "chosen port is displayed in the service's log file.")
42
-eventlet_backdoor_opts = [
43
-    cfg.StrOpt('backdoor_port',
44
-               help="Enable eventlet backdoor.  %s" % help_for_backdoor_port)
45
-]
46
-
47
-CONF = cfg.CONF
48
-CONF.register_opts(eventlet_backdoor_opts)
49
-LOG = logging.getLogger(__name__)
50
-
51
-
52
-class EventletBackdoorConfigValueError(Exception):
53
-    def __init__(self, port_range, help_msg, ex):
54
-        msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. '
55
-               '%(help)s' %
56
-               {'range': port_range, 'ex': ex, 'help': help_msg})
57
-        super(EventletBackdoorConfigValueError, self).__init__(msg)
58
-        self.port_range = port_range
59
-
60
-
61
-def _dont_use_this():
62
-    print("Don't use this, just disconnect instead")
63
-
64
-
65
-def _find_objects(t):
66
-    return [o for o in gc.get_objects() if isinstance(o, t)]
67
-
68
-
69
-def _print_greenthreads():
70
-    for i, gt in enumerate(_find_objects(greenlet.greenlet)):
71
-        print(i, gt)
72
-        traceback.print_stack(gt.gr_frame)
73
-        print()
74
-
75
-
76
-def _print_nativethreads():
77
-    for threadId, stack in sys._current_frames().items():
78
-        print(threadId)
79
-        traceback.print_stack(stack)
80
-        print()
81
-
82
-
83
-def _parse_port_range(port_range):
84
-    if ':' not in port_range:
85
-        start, end = port_range, port_range
86
-    else:
87
-        start, end = port_range.split(':', 1)
88
-    try:
89
-        start, end = int(start), int(end)
90
-        if end < start:
91
-            raise ValueError
92
-        return start, end
93
-    except ValueError as ex:
94
-        raise EventletBackdoorConfigValueError(port_range, ex,
95
-                                               help_for_backdoor_port)
96
-
97
-
98
-def _listen(host, start_port, end_port, listen_func):
99
-    try_port = start_port
100
-    while True:
101
-        try:
102
-            return listen_func((host, try_port))
103
-        except socket.error as exc:
104
-            if (exc.errno != errno.EADDRINUSE or
105
-               try_port >= end_port):
106
-                raise
107
-            try_port += 1
108
-
109
-
110
-def initialize_if_enabled():
111
-    backdoor_locals = {
112
-        'exit': _dont_use_this,      # So we don't exit the entire process
113
-        'quit': _dont_use_this,      # So we don't exit the entire process
114
-        'fo': _find_objects,
115
-        'pgt': _print_greenthreads,
116
-        'pnt': _print_nativethreads,
117
-    }
118
-
119
-    if CONF.backdoor_port is None:
120
-        return None
121
-
122
-    start_port, end_port = _parse_port_range(str(CONF.backdoor_port))
123
-
124
-    # NOTE(johannes): The standard sys.displayhook will print the value of
125
-    # the last expression and set it to __builtin__._, which overwrites
126
-    # the __builtin__._ that gettext sets. Let's switch to using pprint
127
-    # since it won't interact poorly with gettext, and it's easier to
128
-    # read the output too.
129
-    def displayhook(val):
130
-        if val is not None:
131
-            pprint.pprint(val)
132
-    sys.displayhook = displayhook
133
-
134
-    sock = _listen('localhost', start_port, end_port, eventlet.listen)
135
-
136
-    # In the case of backdoor port being zero, a port number is assigned by
137
-    # listen().  In any case, pull the port number out here.
138
-    port = sock.getsockname()[1]
139
-    LOG.info(
140
-        _LI('Eventlet backdoor listening on %(port)s for process %(pid)d') %
141
-        {'port': port, 'pid': os.getpid()}
142
-    )
143
-    eventlet.spawn_n(eventlet.backdoor.backdoor_server, sock,
144
-                     locals=backdoor_locals)
145
-    return port

+ 0
- 113
kiloeyes/openstack/common/excutils.py View File

@@ -1,113 +0,0 @@
1
-# Copyright 2011 OpenStack Foundation.
2
-# Copyright 2012, Red Hat, Inc.
3
-#
4
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
5
-#    not use this file except in compliance with the License. You may obtain
6
-#    a copy of the License at
7
-#
8
-#         http://www.apache.org/licenses/LICENSE-2.0
9
-#
10
-#    Unless required by applicable law or agreed to in writing, software
11
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13
-#    License for the specific language governing permissions and limitations
14
-#    under the License.
15
-
16
-"""
17
-Exception related utilities.
18
-"""
19
-
20
-import logging
21
-import sys
22
-import time
23
-import traceback
24
-
25
-import six
26
-
27
-from kiloeyes.openstack.common.gettextutils import _LE
28
-
29
-
30
-class save_and_reraise_exception(object):
31
-    """Save current exception, run some code and then re-raise.
32
-
33
-    In some cases the exception context can be cleared, resulting in None
34
-    being attempted to be re-raised after an exception handler is run. This
35
-    can happen when eventlet switches greenthreads or when running an
36
-    exception handler, code raises and catches an exception. In both
37
-    cases the exception context will be cleared.
38
-
39
-    To work around this, we save the exception state, run handler code, and
40
-    then re-raise the original exception. If another exception occurs, the
41
-    saved exception is logged and the new exception is re-raised.
42
-
43
-    In some cases the caller may not want to re-raise the exception, and
44
-    for those circumstances this context provides a reraise flag that
45
-    can be used to suppress the exception.  For example::
46
-
47
-      except Exception:
48
-          with save_and_reraise_exception() as ctxt:
49
-              decide_if_need_reraise()
50
-              if not should_be_reraised:
51
-                  ctxt.reraise = False
52
-
53
-    If another exception occurs and reraise flag is False,
54
-    the saved exception will not be logged.
55
-
56
-    If the caller wants to raise new exception during exception handling
57
-    he/she sets reraise to False initially with an ability to set it back to
58
-    True if needed::
59
-
60
-      except Exception:
61
-          with save_and_reraise_exception(reraise=False) as ctxt:
62
-              [if statements to determine whether to raise a new exception]
63
-              # Not raising a new exception, so reraise
64
-              ctxt.reraise = True
65
-    """
66
-    def __init__(self, reraise=True):
67
-        self.reraise = reraise
68
-
69
-    def __enter__(self):
70
-        self.type_, self.value, self.tb, = sys.exc_info()
71
-        return self
72
-
73
-    def __exit__(self, exc_type, exc_val, exc_tb):
74
-        if exc_type is not None:
75
-            if self.reraise:
76
-                logging.error(_LE('Original exception being dropped: %s'),
77
-                              traceback.format_exception(self.type_,
78
-                                                         self.value,
79
-                                                         self.tb))
80
-            return False
81
-        if self.reraise:
82
-            six.reraise(self.type_, self.value, self.tb)
83
-
84
-
85
-def forever_retry_uncaught_exceptions(infunc):
86
-    def inner_func(*args, **kwargs):
87
-        last_log_time = 0
88
-        last_exc_message = None
89
-        exc_count = 0
90
-        while True:
91
-            try:
92
-                return infunc(*args, **kwargs)
93
-            except Exception as exc:
94
-                this_exc_message = six.u(str(exc))
95
-                if this_exc_message == last_exc_message:
96
-                    exc_count += 1
97
-                else:
98
-                    exc_count = 1
99
-                # Do not log any more frequently than once a minute unless
100
-                # the exception message changes
101
-                cur_time = int(time.time())
102
-                if (cur_time - last_log_time > 60 or
103
-                        this_exc_message != last_exc_message):
104
-                    logging.exception(
105
-                        _LE('Unexpected exception occurred %d time(s)... '
106
-                            'retrying.') % exc_count)
107
-                    last_log_time = cur_time
108
-                    last_exc_message = this_exc_message
109
-                    exc_count = 0
110
-                # This should be a very rare event. In case it isn't, do
111
-                # a sleep.
112
-                time.sleep(1)
113
-    return inner_func

+ 0
- 146
kiloeyes/openstack/common/fileutils.py View File

@@ -1,146 +0,0 @@
1
-# Copyright 2011 OpenStack Foundation.
2
-# All Rights Reserved.
3
-#
4
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
5
-#    not use this file except in compliance with the License. You may obtain
6
-#    a copy of the License at
7
-#
8
-#         http://www.apache.org/licenses/LICENSE-2.0
9
-#
10
-#    Unless required by applicable law or agreed to in writing, software
11
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13
-#    License for the specific language governing permissions and limitations
14
-#    under the License.
15
-
16
-import contextlib
17
-import errno
18
-import os
19
-import tempfile
20
-
21
-from kiloeyes.openstack.common import excutils
22
-from oslo_log import log as logging
23
-
24
-LOG = logging.getLogger(__name__)
25
-
26
-_FILE_CACHE = {}
27
-
28
-
29
-def ensure_tree(path):
30
-    """Create a directory (and any ancestor directories required)
31
-
32
-    :param path: Directory to create
33
-    """
34
-    try:
35
-        os.makedirs(path)
36
-    except OSError as exc:
37
-        if exc.errno == errno.EEXIST:
38
-            if not os.path.isdir(path):
39
-                raise
40
-        else:
41
-            raise
42
-
43
-
44
-def read_cached_file(filename, force_reload=False):
45
-    """Read from a file if it has been modified.
46
-
47
-    :param force_reload: Whether to reload the file.
48
-    :returns: A tuple with a boolean specifying if the data is fresh
49
-              or not.
50
-    """
51
-    global _FILE_CACHE
52
-
53
-    if force_reload:
54
-        delete_cached_file(filename)
55
-
56
-    reloaded = False
57
-    mtime = os.path.getmtime(filename)
58
-    cache_info = _FILE_CACHE.setdefault(filename, {})
59
-
60
-    if not cache_info or mtime > cache_info.get('mtime', 0):
61
-        LOG.debug("Reloading cached file %s" % filename)
62
-        with open(filename) as fap:
63
-            cache_info['data'] = fap.read()
64
-        cache_info['mtime'] = mtime
65
-        reloaded = True
66
-    return (reloaded, cache_info['data'])
67
-
68
-
69
-def delete_cached_file(filename):
70
-    """Delete cached file if present.
71
-
72
-    :param filename: filename to delete
73
-    """
74
-    global _FILE_CACHE
75
-
76
-    if filename in _FILE_CACHE:
77
-        del _FILE_CACHE[filename]
78
-
79
-
80
-def delete_if_exists(path, remove=os.unlink):
81
-    """Delete a file, but ignore file not found error.
82
-
83
-    :param path: File to delete
84
-    :param remove: Optional function to remove passed path
85
-    """
86
-
87
-    try:
88
-        remove(path)
89
-    except OSError as e:
90
-        if e.errno != errno.ENOENT:
91
-            raise
92
-
93
-
94
-@contextlib.contextmanager
95
-def remove_path_on_error(path, remove=delete_if_exists):
96
-    """Protect code that wants to operate on PATH atomically.
97
-    Any exception will cause PATH to be removed.
98
-
99
-    :param path: File to work with
100
-    :param remove: Optional function to remove passed path
101
-    """
102
-
103
-    try:
104
-        yield
105
-    except Exception:
106
-        with excutils.save_and_reraise_exception():
107
-            remove(path)
108
-
109
-
110
-def file_open(*args, **kwargs):
111
-    """Open file
112
-
113
-    see built-in open() documentation for more details
114
-
115
-    Note: The reason this is kept in a separate module is to easily
116
-    be able to provide a stub module that doesn't alter system
117
-    state at all (for unit tests)
118
-    """
119
-    return open(*args, **kwargs)
120
-
121
-
122
-def write_to_tempfile(content, path=None, suffix='', prefix='tmp'):
123
-    """Create temporary file or use existing file.
124
-
125
-    This util is needed for creating temporary file with
126
-    specified content, suffix and prefix. If path is not None,
127
-    it will be used for writing content. If the path doesn't
128
-    exist it'll be created.
129
-
130
-    :param content: content for temporary file.
131
-    :param path: same as parameter 'dir' for mkstemp
132
-    :param suffix: same as parameter 'suffix' for mkstemp
133
-    :param prefix: same as parameter 'prefix' for mkstemp
134
-
135
-    For example: it can be used in database tests for creating
136
-    configuration files.
137
-    """
138
-    if path:
139
-        ensure_tree(path)
140
-
141
-    (fd, path) = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix)
142
-    try:
143
-        os.write(fd, content)
144
-    finally:
145
-        os.close(fd)
146
-    return path

+ 0
- 0
kiloeyes/openstack/common/fixture/__init__.py View File


+ 0
- 85
kiloeyes/openstack/common/fixture/config.py View File

@@ -1,85 +0,0 @@
1
-#
2
-# Copyright 2013 Mirantis, Inc.
3
-# Copyright 2013 OpenStack Foundation
4
-# All Rights Reserved.
5
-#
6
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
7
-#    not use this file except in compliance with the License. You may obtain
8
-#    a copy of the License at
9
-#
10
-#         http://www.apache.org/licenses/LICENSE-2.0
11
-#
12
-#    Unless required by applicable law or agreed to in writing, software
13
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
15
-#    License for the specific language governing permissions and limitations
16
-#    under the License.
17
-
18
-import fixtures
19
-from oslo_config import cfg
20
-import six
21
-
22
-
23
-class Config(fixtures.Fixture):
24
-    """Allows overriding configuration settings for the test.
25
-
26
-    `conf` will be reset on cleanup.
27
-
28
-    """
29
-
30
-    def __init__(self, conf=cfg.CONF):
31
-        self.conf = conf
32
-
33
-    def setUp(self):
34
-        super(Config, self).setUp()
35
-        # NOTE(morganfainberg): unregister must be added to cleanup before
36
-        # reset is because cleanup works in reverse order of registered items,
37
-        # and a reset must occur before unregistering options can occur.
38
-        self.addCleanup(self._unregister_config_opts)
39
-        self.addCleanup(self.conf.reset)
40
-        self._registered_config_opts = {}
41
-
42
-    def config(self, **kw):
43
-        """Override configuration values.
44
-
45
-        The keyword arguments are the names of configuration options to
46
-        override and their values.
47
-
48
-        If a `group` argument is supplied, the overrides are applied to
49
-        the specified configuration option group, otherwise the overrides
50
-        are applied to the ``default`` group.
51
-
52
-        """
53
-
54
-        group = kw.pop('group', None)
55
-        for k, v in six.iteritems(kw):
56
-            self.conf.set_override(k, v, group)
57
-
58
-    def _unregister_config_opts(self):
59
-        for group in self._registered_config_opts:
60
-            self.conf.unregister_opts(self._registered_config_opts[group],
61
-                                      group=group)
62
-
63
-    def register_opt(self, opt, group=None):
64
-        """Register a single option for the test run.
65
-
66
-        Options registered in this manner will automatically be unregistered
67
-        during cleanup.
68
-
69
-        If a `group` argument is supplied, it will register the new option
70
-        to that group, otherwise the option is registered to the ``default``
71
-        group.
72
-        """
73
-        self.conf.register_opt(opt, group=group)
74
-        self._registered_config_opts.setdefault(group, set()).add(opt)
75
-
76
-    def register_opts(self, opts, group=None):
77
-        """Register multiple options for the test run.
78
-
79
-        This works in the same manner as register_opt() but takes a list of
80
-        options as the first argument. All arguments will be registered to the
81
-        same group if the ``group`` argument is supplied, otherwise all options
82
-        will be registered to the ``default`` group.
83
-        """
84
-        for opt in opts:
85
-            self.register_opt(opt, group=group)

+ 0
- 51
kiloeyes/openstack/common/fixture/lockutils.py View File

@@ -1,51 +0,0 @@
1
-# Copyright 2011 OpenStack Foundation.
2
-# All Rights Reserved.
3
-#
4
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
5
-#    not use this file except in compliance with the License. You may obtain
6
-#    a copy of the License at
7
-#
8
-#         http://www.apache.org/licenses/LICENSE-2.0
9
-#
10
-#    Unless required by applicable law or agreed to in writing, software
11
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13
-#    License for the specific language governing permissions and limitations
14
-#    under the License.
15
-
16
-import fixtures
17
-
18
-from kiloeyes.openstack.common import lockutils
19
-
20
-
21
-class LockFixture(fixtures.Fixture):
22
-    """External locking fixture.
23
-
24
-    This fixture is basically an alternative to the synchronized decorator with
25
-    the external flag so that tearDowns and addCleanups will be included in
26
-    the lock context for locking between tests. The fixture is recommended to
27
-    be the first line in a test method, like so::
28
-
29
-        def test_method(self):
30
-            self.useFixture(LockFixture)
31
-                ...
32
-
33
-    or the first line in setUp if all the test methods in the class are
34
-    required to be serialized. Something like::
35
-
36
-        class TestCase(testtools.testcase):
37
-            def setUp(self):
38
-                self.useFixture(LockFixture)
39
-                super(TestCase, self).setUp()
40
-                    ...
41
-
42
-    This is because addCleanups are put on a LIFO queue that gets run after the
43
-    test method exits. (either by completing or raising an exception)
44
-    """
45
-    def __init__(self, name, lock_file_prefix=None):
46
-        self.mgr = lockutils.lock(name, lock_file_prefix, True)
47
-
48
-    def setUp(self):
49
-        super(LockFixture, self).setUp()
50
-        self.addCleanup(self.mgr.__exit__, None, None, None)
51
-        self.lock = self.mgr.__enter__()

+ 0
- 34
kiloeyes/openstack/common/fixture/logging.py View File

@@ -1,34 +0,0 @@
1
-# All Rights Reserved.
2
-#
3
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
4
-# not use this file except in compliance with the License. You may obtain
5
-# a copy of the License at
6
-#
7
-#      http://www.apache.org/licenses/LICENSE-2.0
8
-#
9
-# Unless required by applicable law or agreed to in writing, software
10
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12
-# License for the specific language governing permissions and limitations
13
-# under the License.
14
-
15
-import fixtures
16
-
17
-
18
-def get_logging_handle_error_fixture():
19
-    """returns a fixture to make logging raise formatting exceptions.
20
-
21
-    Usage:
22
-    self.useFixture(logging.get_logging_handle_error_fixture())
23
-    """
24
-    return fixtures.MonkeyPatch('logging.Handler.handleError',
25
-                                _handleError)
26
-
27
-
28
-def _handleError(self, record):
29
-    """Monkey patch for logging.Handler.handleError.
30
-
31
-    The default handleError just logs the error to stderr but we want
32
-    the option of actually raising an exception.
33
-    """
34
-    raise

+ 0
- 62
kiloeyes/openstack/common/fixture/mockpatch.py View File

@@ -1,62 +0,0 @@
1
-# Copyright 2010 United States Government as represented by the
2
-# Administrator of the National Aeronautics and Space Administration.
3
-# Copyright 2013 Hewlett-Packard Development Company, L.P.
4
-# All Rights Reserved.
5
-#
6
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
7
-# not use this file except in compliance with the License. You may obtain
8
-# a copy of the License at
9
-#
10
-#      http://www.apache.org/licenses/LICENSE-2.0
11
-#
12
-# Unless required by applicable law or agreed to in writing, software
13
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
15
-# License for the specific language governing permissions and limitations
16
-# under the License.
17
-
18
-##############################################################################
19
-##############################################################################
20
-#
21
-# DO NOT MODIFY THIS FILE
22
-#
23
-# This file is being graduated to the oslotest library. Please make all
24
-# changes there, and only backport critical fixes here. - dhellmann
25
-#
26
-##############################################################################
27
-##############################################################################
28
-
29
-import fixtures
30
-import mock
31
-
32
-
33
-class PatchObject(fixtures.Fixture):
34
-    """Deal with code around mock."""
35
-
36
-    def __init__(self, obj, attr, new=mock.DEFAULT, **kwargs):
37
-        self.obj = obj
38
-        self.attr = attr
39
-        self.kwargs = kwargs
40
-        self.new = new
41
-
42
-    def setUp(self):
43
-        super(PatchObject, self).setUp()
44
-        _p = mock.patch.object(self.obj, self.attr, self.new, **self.kwargs)
45
-        self.mock = _p.start()
46
-        self.addCleanup(_p.stop)
47
-
48
-
49
-class Patch(fixtures.Fixture):
50
-
51
-    """Deal with code around mock.patch."""
52
-
53
-    def __init__(self, obj, new=mock.DEFAULT, **kwargs):
54
-        self.obj = obj
55
-        self.kwargs = kwargs
56
-        self.new = new
57
-
58
-    def setUp(self):
59
-        super(Patch, self).setUp()
60
-        _p = mock.patch(self.obj, self.new, **self.kwargs)
61
-        self.mock = _p.start()
62
-        self.addCleanup(_p.stop)

+ 0
- 43
kiloeyes/openstack/common/fixture/moxstubout.py View File

@@ -1,43 +0,0 @@
1
-# Copyright 2010 United States Government as represented by the
2
-# Administrator of the National Aeronautics and Space Administration.
3
-# Copyright 2013 Hewlett-Packard Development Company, L.P.
4
-# All Rights Reserved.
5
-#
6
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
7
-# not use this file except in compliance with the License. You may obtain
8
-# a copy of the License at
9
-#
10
-#      http://www.apache.org/licenses/LICENSE-2.0
11
-#
12
-# Unless required by applicable law or agreed to in writing, software
13
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
15
-# License for the specific language governing permissions and limitations
16
-# under the License.
17
-
18
-##############################################################################
19
-##############################################################################
20
-#
21
-# DO NOT MODIFY THIS FILE
22
-#
23
-# This file is being graduated to the oslotest library. Please make all
24
-# changes there, and only backport critical fixes here. - dhellmann
25
-#
26
-##############################################################################
27
-##############################################################################
28
-
29
-import fixtures
30
-from six.moves import mox
31
-
32
-
33
-class MoxStubout(fixtures.Fixture):
34
-    """Deal with code around mox and stubout as a fixture."""
35
-
36
-    def setUp(self):
37
-        super(MoxStubout, self).setUp()
38
-        # emulate some of the mox stuff, we can't use the metaclass
39
-        # because it screws with our generators
40
-        self.mox = mox.Mox()
41
-        self.stubs = self.mox.stubs
42
-        self.addCleanup(self.mox.UnsetStubs)
43
-        self.addCleanup(self.mox.VerifyAll)

+ 0
- 479
kiloeyes/openstack/common/gettextutils.py View File

@@ -1,479 +0,0 @@
1
-# Copyright 2012 Red Hat, Inc.
2
-# Copyright 2013 IBM Corp.
3
-# All Rights Reserved.
4
-#
5
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
6
-#    not use this file except in compliance with the License. You may obtain
7
-#    a copy of the License at
8
-#
9
-#         http://www.apache.org/licenses/LICENSE-2.0
10
-#
11
-#    Unless required by applicable law or agreed to in writing, software
12
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
13
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
14
-#    License for the specific language governing permissions and limitations
15
-#    under the License.
16
-
17
-"""
18
-gettext for openstack-common modules.
19
-
20
-Usual usage in an openstack.common module:
21
-
22
-    from kiloeyes.openstack.common.gettextutils import _
23
-"""
24
-
25
-import copy
26
-import gettext
27
-import locale
28
-from logging import handlers
29
-import os
30
-
31
-from babel import localedata
32
-import six
33
-
34
-_AVAILABLE_LANGUAGES = {}
35
-
36
-# FIXME(dhellmann): Remove this when moving to oslo.i18n.
37
-USE_LAZY = False
38
-
39
-
40
-class TranslatorFactory(object):
41
-    """Create translator functions
42
-    """
43
-
44
-    def __init__(self, domain, localedir=None):
45
-        """Establish a set of translation functions for the domain.
46
-
47
-        :param domain: Name of translation domain,
48
-                       specifying a message catalog.
49
-        :type domain: str
50
-        :param lazy: Delays translation until a message is emitted.
51
-                     Defaults to False.
52
-        :type lazy: Boolean
53
-        :param localedir: Directory with translation catalogs.
54
-        :type localedir: str
55
-        """
56
-        self.domain = domain
57
-        if localedir is None:
58
-            localedir = os.environ.get(domain.upper() + '_LOCALEDIR')
59
-        self.localedir = localedir
60
-
61
-    def _make_translation_func(self, domain=None):
62
-        """Return a new translation function ready for use.
63
-
64
-        Takes into account whether or not lazy translation is being
65
-        done.
66
-
67
-        The domain can be specified to override the default from the
68
-        factory, but the localedir from the factory is always used
69
-        because we assume the log-level translation catalogs are
70
-        installed in the same directory as the main application
71
-        catalog.
72
-
73
-        """
74
-        if domain is None:
75
-            domain = self.domain
76
-        t = gettext.translation(domain,
77
-                                localedir=self.localedir,
78
-                                fallback=True)
79
-        # Use the appropriate method of the translation object based
80
-        # on the python version.
81
-        m = t.gettext if six.PY3 else t.ugettext
82
-
83
-        def f(msg):
84
-            """oslo.i18n.gettextutils translation function."""
85
-            if USE_LAZY:
86
-                return Message(msg, domain=domain)
87
-            return m(msg)
88
-        return f
89
-
90
-    @property
91
-    def primary(self):
92
-        "The default translation function."
93
-        return self._make_translation_func()
94
-
95
-    def _make_log_translation_func(self, level):
96
-        return self._make_translation_func(self.domain + '-log-' + level)
97
-
98
-    @property
99
-    def log_info(self):
100
-        "Translate info-level log messages."
101
-        return self._make_log_translation_func('info')
102
-
103
-    @property
104
-    def log_warning(self):
105
-        "Translate warning-level log messages."
106
-        return self._make_log_translation_func('warning')
107
-
108
-    @property
109
-    def log_error(self):
110
-        "Translate error-level log messages."
111
-        return self._make_log_translation_func('error')
112
-
113
-    @property
114
-    def log_critical(self):
115
-        "Translate critical-level log messages."
116
-        return self._make_log_translation_func('critical')
117
-
118
-
119
-# NOTE(dhellmann): When this module moves out of the incubator into
120
-# oslo.i18n, these global variables can be moved to an integration
121
-# module within each application.
122
-
123
-# Create the global translation functions.
124
-_translators = TranslatorFactory('kiloeyes')
125
-
126
-# The primary translation function using the well-known name "_"
127
-_ = _translators.primary
128
-
129
-# Translators for log levels.
130
-#
131
-# The abbreviated names are meant to reflect the usual use of a short
132
-# name like '_'. The "L" is for "log" and the other letter comes from
133
-# the level.
134
-_LI = _translators.log_info
135
-_LW = _translators.log_warning
136
-_LE = _translators.log_error
137
-_LC = _translators.log_critical
138
-
139
-# NOTE(dhellmann): End of globals that will move to the application's
140
-# integration module.
141
-
142
-
143
-def enable_lazy():
144
-    """Convenience function for configuring _() to use lazy gettext
145
-
146
-    Call this at the start of execution to enable the gettextutils._
147
-    function to use lazy gettext functionality. This is useful if
148
-    your project is importing _ directly instead of using the
149
-    gettextutils.install() way of importing the _ function.
150
-    """
151
-    global USE_LAZY
152
-    USE_LAZY = True
153
-
154
-
155
-def install(domain):
156
-    """Install a _() function using the given translation domain.
157
-
158
-    Given a translation domain, install a _() function using gettext's
159
-    install() function.
160
-
161
-    The main difference from gettext.install() is that we allow
162
-    overriding the default localedir (e.g. /usr/share/locale) using
163
-    a translation-domain-specific environment variable (e.g.
164
-    NOVA_LOCALEDIR).
165
-
166
-    Note that to enable lazy translation, enable_lazy must be
167
-    called.
168
-
169
-    :param domain: the translation domain
170
-    """
171
-    from six import moves
172
-    tf = TranslatorFactory(domain)
173
-    moves.builtins.__dict__['_'] = tf.primary
174
-
175
-
176
-class Message(six.text_type):
177
-    """A Message object is a unicode object that can be translated.
178
-
179
-    Translation of Message is done explicitly using the translate() method.
180
-    For all non-translation intents and purposes, a Message is simply unicode,
181
-    and can be treated as such.
182
-    """
183
-
184
-    def __new__(cls, msgid, msgtext=None, params=None,
185
-                domain='kiloeyes', *args):
186
-        """Create a new Message object.
187
-
188
-        In order for translation to work gettext requires a message ID, this
189
-        msgid will be used as the base unicode text. It is also possible
190
-        for the msgid and the base unicode text to be different by passing
191
-        the msgtext parameter.
192
-        """
193
-        # If the base msgtext is not given, we use the default translation
194
-        # of the msgid (which is in English) just in case the system locale is
195
-        # not English, so that the base text will be in that locale by default.
196
-        if not msgtext:
197
-            msgtext = Message._translate_msgid(msgid, domain)
198
-        # We want to initialize the parent unicode with the actual object that
199
-        # would have been plain unicode if 'Message' was not enabled.
200
-        msg = super(Message, cls).__new__(cls, msgtext)
201
-        msg.msgid = msgid
202
-        msg.domain = domain
203
-        msg.params = params
204
-        return msg
205
-
206
-    def translate(self, desired_locale=None):
207
-        """Translate this message to the desired locale.
208
-
209
-        :param desired_locale: The desired locale to translate the message to,
210
-                               if no locale is provided the message will be
211
-                               translated to the system's default locale.
212
-
213
-        :returns: the translated message in unicode
214
-        """
215
-
216
-        translated_message = Message._translate_msgid(self.msgid,
217
-                                                      self.domain,
218
-                                                      desired_locale)
219
-        if self.params is None:
220
-            # No need for more translation
221
-            return translated_message
222
-
223
-        # This Message object may have been formatted with one or more
224
-        # Message objects as substitution arguments, given either as a single
225
-        # argument, part of a tuple, or as one or more values in a dictionary.
226
-        # When translating this Message we need to translate those Messages too
227
-        translated_params = _translate_args(self.params, desired_locale)
228
-
229
-        translated_message = translated_message % translated_params
230
-
231
-        return translated_message
232
-
233
-    @staticmethod
234
-    def _translate_msgid(msgid, domain, desired_locale=None):
235
-        if not desired_locale:
236
-            system_locale = locale.getdefaultlocale()
237
-            # If the system locale is not available to the runtime use English
238
-            if not system_locale[0]:
239
-                desired_locale = 'en_US'
240
-            else:
241
-                desired_locale = system_locale[0]
242
-
243
-        locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR')
244
-        lang = gettext.translation(domain,
245
-                                   localedir=locale_dir,
246
-                                   languages=[desired_locale],
247
-                                   fallback=True)
248
-        if six.PY3:
249
-            translator = lang.gettext
250
-        else:
251
-            translator = lang.ugettext
252
-
253
-        translated_message = translator(msgid)
254
-        return translated_message
255
-
256
-    def __mod__(self, other):
257
-        # When we mod a Message we want the actual operation to be performed
258
-        # by the parent class (i.e. unicode()), the only thing  we do here is
259
-        # save the original msgid and the parameters in case of a translation
260
-        params = self._sanitize_mod_params(other)
261
-        unicode_mod = super(Message, self).__mod__(params)
262
-        modded = Message(self.msgid,
263
-                         msgtext=unicode_mod,
264
-                         params=params,
265
-                         domain=self.domain)
266
-        return modded
267
-
268
-    def _sanitize_mod_params(self, other):
269
-        """Sanitize the object being modded with this Message.
270
-
271
-        - Add support for modding 'None' so translation supports it
272
-        - Trim the modded object, which can be a large dictionary, to only
273
-        those keys that would actually be used in a translation
274
-        - Snapshot the object being modded, in case the message is
275
-        translated, it will be used as it was when the Message was created
276
-        """
277
-        if other is None:
278
-            params = (other,)
279
-        elif isinstance(other, dict):
280
-            # Merge the dictionaries
281
-            # Copy each item in case one does not support deep copy.
282
-            params = {}
283
-            if isinstance(self.params, dict):
284
-                for key, val in self.params.items():
285
-                    params[key] = self._copy_param(val)
286
-            for key, val in other.items():
287
-                params[key] = self._copy_param(val)
288
-        else:
289
-            params = self._copy_param(other)
290
-        return params
291
-
292
-    def _copy_param(self, param):
293
-        try:
294
-            return copy.deepcopy(param)
295
-        except Exception:
296
-            # Fallback to casting to unicode this will handle the
297
-            # python code-like objects that can't be deep-copied
298
-            return six.text_type(param)
299
-
300
-    def __add__(self, other):
301
-        msg = _('Message objects do not support addition.')
302
-        raise TypeError(msg)
303
-
304
-    def __radd__(self, other):
305
-        return self.__add__(other)
306
-
307
-    if six.PY2:
308
-        def __str__(self):
309
-            # NOTE(luisg): Logging in python 2.6 tries to str() log records,
310
-            # and it expects specifically a UnicodeError in order to proceed.
311
-            msg = _('Message objects do not support str() because they may '
312
-                    'contain non-ascii characters. '
313
-                    'Please use unicode() or translate() instead.')
314
-            raise UnicodeError(msg)
315
-
316
-
317
-def get_available_languages(domain):
318
-    """Lists the available languages for the given translation domain.
319
-
320
-    :param domain: the domain to get languages for
321
-    """
322
-    if domain in _AVAILABLE_LANGUAGES:
323
-        return copy.copy(_AVAILABLE_LANGUAGES[domain])
324
-
325
-    localedir = '%s_LOCALEDIR' % domain.upper()
326
-    find = lambda x: gettext.find(domain,
327
-                                  localedir=os.environ.get(localedir),
328
-                                  languages=[x])
329
-
330
-    # NOTE(mrodden): en_US should always be available (and first in case
331
-    # order matters) since our in-line message strings are en_US
332
-    language_list = ['en_US']
333
-    # NOTE(luisg): Babel <1.0 used a function called list(), which was
334
-    # renamed to locale_identifiers() in >=1.0, the requirements master list
335
-    # requires >=0.9.6, uncapped, so defensively work with both. We can remove
336
-    # this check when the master list updates to >=1.0, and update all projects
337
-    list_identifiers = (getattr(localedata, 'list', None) or
338
-                        getattr(localedata, 'locale_identifiers'))
339
-    locale_identifiers = list_identifiers()
340
-
341
-    for i in locale_identifiers:
342
-        if find(i) is not None:
343
-            language_list.append(i)
344
-
345
-    # NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported
346
-    # locales (e.g. 'zh_CN', and 'zh_TW') aren't supported even though they
347
-    # are perfectly legitimate locales:
348
-    #     https://github.com/mitsuhiko/babel/issues/37
349
-    # In Babel 1.3 they fixed the bug and they support these locales, but
350
-    # they are still not explicitly "listed" by locale_identifiers().
351
-    # That is  why we add the locales here explicitly if necessary so that
352
-    # they are listed as supported.
353
-    aliases = {'zh': 'zh_CN',
354
-               'zh_Hant_HK': 'zh_HK',
355
-               'zh_Hant': 'zh_TW',
356
-               'fil': 'tl_PH'}
357
-    for (locale_, alias) in six.iteritems(aliases):
358
-        if locale_ in language_list and alias not in language_list:
359
-            language_list.append(alias)
360
-
361
-    _AVAILABLE_LANGUAGES[domain] = language_list
362
-    return copy.copy(language_list)
363
-
364
-
365
-def translate(obj, desired_locale=None):
366
-    """Gets the translated unicode representation of the given object.
367
-
368
-    If the object is not translatable it is returned as-is.
369
-    If the locale is None the object is translated to the system locale.
370
-
371
-    :param obj: the object to translate
372
-    :param desired_locale: the locale to translate the message to, if None the
373
-                           default system locale will be used
374
-    :returns: the translated object in unicode, or the original object if
375
-              it could not be translated
376
-    """
377
-    message = obj
378
-    if not isinstance(message, Message):
379
-        # If the object to translate is not already translatable,
380
-        # let's first get its unicode representation
381
-        message = six.text_type(obj)
382
-    if isinstance(message, Message):
383
-        # Even after unicoding() we still need to check if we are
384
-        # running with translatable unicode before translating
385
-        return message.translate(desired_locale)
386
-    return obj
387
-
388
-
389
-def _translate_args(args, desired_locale=None):
390
-    """Translates all the translatable elements of the given arguments object.
391
-
392
-    This method is used for translating the translatable values in method
393
-    arguments which include values of tuples or dictionaries.
394
-    If the object is not a tuple or a dictionary the object itself is
395
-    translated if it is translatable.
396
-
397
-    If the locale is None the object is translated to the system locale.
398
-
399
-    :param args: the args to translate
400
-    :param desired_locale: the locale to translate the args to, if None the
401
-                           default system locale will be used
402
-    :returns: a new args object with the translated contents of the original
403
-    """
404
-    if isinstance(args, tuple):
405
-        return tuple(translate(v, desired_locale) for v in args)
406
-    if isinstance(args, dict):
407
-        translated_dict = {}
408
-        for (k, v) in six.iteritems(args):
409
-            translated_v = translate(v, desired_locale)
410
-            translated_dict[k] = translated_v
411
-        return translated_dict
412
-    return translate(args, desired_locale)
413
-
414
-
415
-class TranslationHandler(handlers.MemoryHandler):
416
-    """Handler that translates records before logging them.
417
-
418
-    The TranslationHandler takes a locale and a target logging.Handler object
419
-    to forward LogRecord objects to after translating them. This handler
420
-    depends on Message objects being logged, instead of regular strings.
421
-
422
-    The handler can be configured declaratively in the logging.conf as follows:
423
-
424
-        [handlers]
425
-        keys = translatedlog, translator
426
-
427
-        [handler_translatedlog]
428
-        class = handlers.WatchedFileHandler
429
-        args = ('/var/log/api-localized.log',)
430
-        formatter = context
431
-
432
-        [handler_translator]
433
-        class = openstack.common.log.TranslationHandler
434
-        target = translatedlog
435
-        args = ('zh_CN',)
436
-
437
-    If the specified locale is not available in the system, the handler will
438
-    log in the default locale.
439
-    """
440
-
441
-    def __init__(self, locale=None, target=None):
442
-        """Initialize a TranslationHandler
443
-
444
-        :param locale: locale to use for translating messages
445
-        :param target: logging.Handler object to forward
446
-                       LogRecord objects to after translation
447
-        """
448
-        # NOTE(luisg): In order to allow this handler to be a wrapper for
449
-        # other handlers, such as a FileHandler, and still be able to
450
-        # configure it using logging.conf, this handler has to extend
451
-        # MemoryHandler because only the MemoryHandlers' logging.conf
452
-        # parsing is implemented such that it accepts a target handler.
453
-        handlers.MemoryHandler.__init__(self, capacity=0, target=target)
454
-        self.locale = locale
455
-
456
-    def setFormatter(self, fmt):
457
-        self.target.setFormatter(fmt)
458
-
459
-    def emit(self, record):
460
-        # We save the message from the original record to restore it
461
-        # after translation, so other handlers are not affected by this
462
-        original_msg = record.msg
463
-        original_args = record.args
464
-
465
-        try:
466
-            self._translate_and_log_record(record)
467
-        finally:
468
-            record.msg = original_msg
469
-            record.args = original_args
470
-
471
-    def _translate_and_log_record(self, record):
472
-        record.msg = translate(record.msg, self.locale)
473
-
474
-        # In addition to translating the message, we also need to translate
475
-        # arguments that were passed to the log method that were not part
476
-        # of the main message e.g., log.info(_('Some message %s'), this_one))
477
-        record.args = _translate_args(record.args, self.locale)
478
-
479
-        self.target.emit(record)

+ 0
- 73
kiloeyes/openstack/common/importutils.py View File

@@ -1,73 +0,0 @@
1
-# Copyright 2011 OpenStack Foundation.
2
-# All Rights Reserved.
3
-#
4
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
5
-#    not use this file except in compliance with the License. You may obtain
6
-#    a copy of the License at
7
-#
8
-#         http://www.apache.org/licenses/LICENSE-2.0
9
-#
10
-#    Unless required by applicable law or agreed to in writing, software
11
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13
-#    License for the specific language governing permissions and limitations
14
-#    under the License.
15
-
16
-"""
17
-Import related utilities and helper functions.
18
-"""
19
-
20
-import sys
21
-import traceback
22
-
23
-
24
-def import_class(import_str):
25
-    """Returns a class from a string including module and class."""
26
-    mod_str, _sep, class_str = import_str.rpartition('.')
27
-    __import__(mod_str)
28
-    try:
29
-        return getattr(sys.modules[mod_str], class_str)
30
-    except AttributeError:
31
-        raise ImportError('Class %s cannot be found (%s)' %
32
-                          (class_str,
33
-                           traceback.format_exception(*sys.exc_info())))
34
-
35
-
36
-def import_object(import_str, *args, **kwargs):
37
-    """Import a class and return an instance of it."""
38
-    return import_class(import_str)(*args, **kwargs)
39
-
40
-
41
-def import_object_ns(name_space, import_str, *args, **kwargs):
42
-    """Tries to import object from default namespace.
43
-
44
-    Imports a class and return an instance of it, first by trying
45
-    to find the class in a default namespace, then failing back to
46
-    a full path if not found in the default namespace.
47
-    """
48
-    import_value = "%s.%s" % (name_space, import_str)
49
-    try:
50
-        return import_class(import_value)(*args, **kwargs)
51
-    except ImportError:
52
-        return import_class(import_str)(*args, **kwargs)
53
-
54
-
55
-def import_module(import_str):
56
-    """Import a module."""
57
-    __import__(import_str)
58
-    return sys.modules[import_str]
59
-
60
-
61
-def import_versioned_module(version, submodule=None):
62
-    module = 'kiloeyes.v%s' % version
63
-    if submodule:
64
-        module = '.'.join((module, submodule))
65
-    return import_module(module)
66
-
67
-
68
-def try_import(import_str, default=None):
69
-    """Try to import a module and if it fails return default."""
70
-    try:
71
-        return import_module(import_str)
72
-    except ImportError:
73
-        return default

+ 0
- 202
kiloeyes/openstack/common/jsonutils.py View File

@@ -1,202 +0,0 @@
1
-# Copyright 2010 United States Government as represented by the
2
-# Administrator of the National Aeronautics and Space Administration.
3
-# Copyright 2011 Justin Santa Barbara
4
-# All Rights Reserved.
5
-#
6
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
7
-#    not use this file except in compliance with the License. You may obtain
8
-#    a copy of the License at
9
-#
10
-#         http://www.apache.org/licenses/LICENSE-2.0
11
-#
12
-#    Unless required by applicable law or agreed to in writing, software
13
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
15
-#    License for the specific language governing permissions and limitations
16
-#    under the License.
17
-
18
-'''
19
-JSON related utilities.
20
-
21
-This module provides a few things:
22
-
23
-    1) A handy function for getting an object down to something that can be
24
-    JSON serialized.  See to_primitive().
25
-
26
-    2) Wrappers around loads() and dumps().  The dumps() wrapper will
27
-    automatically use to_primitive() for you if needed.
28
-
29
-    3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
30
-    is available.
31
-'''
32
-
33
-
34
-import codecs
35
-import datetime
36
-import functools
37
-import inspect
38
-import itertools
39
-import sys
40
-
41
-is_simplejson = False
42
-if sys.version_info < (2, 7):
43
-    # On Python <= 2.6, json module is not C boosted, so try to use
44
-    # simplejson module if available
45
-    try:
46
-        import simplejson as json
47
-        # NOTE(mriedem): Make sure we have a new enough version of simplejson
48
-        # to support the namedtuple_as_object argument. This can be removed
49
-        # in the Kilo release when python 2.6 support is dropped.
50
-        if 'namedtuple_as_object' in inspect.getargspec(json.dumps).args:
51
-            is_simplejson = True
52
-        else:
53
-            import json
54
-    except ImportError:
55
-        import json
56
-else:
57
-    import json
58
-
59
-import six
60
-import six.moves.xmlrpc_client as xmlrpclib
61
-
62
-from kiloeyes.openstack.common import gettextutils
63
-from kiloeyes.openstack.common import importutils
64
-from kiloeyes.openstack.common import strutils
65
-from kiloeyes.openstack.common import timeutils
66
-
67
-netaddr = importutils.try_import("netaddr")
68
-
69
-_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
70
-                     inspect.isfunction, inspect.isgeneratorfunction,
71
-                     inspect.isgenerator, inspect.istraceback, inspect.isframe,
72
-                     inspect.iscode, inspect.isbuiltin, inspect.isroutine,
73
-                     inspect.isabstract]
74
-
75
-_simple_types = (six.string_types + six.integer_types
76
-                 + (type(None), bool, float))
77
-
78
-
79
-def to_primitive(value, convert_instances=False, convert_datetime=True,
80
-                 level=0, max_depth=3):
81
-    """Convert a complex object into primitives.
82
-
83
-    Handy for JSON serialization. We can optionally handle instances,
84
-    but since this is a recursive function, we could have cyclical
85
-    data structures.
86
-
87
-    To handle cyclical data structures we could track the actual objects
88
-    visited in a set, but not all objects are hashable. Instead we just
89
-    track the depth of the object inspections and don't go too deep.
90
-
91
-    Therefore, convert_instances=True is lossy ... be aware.
92
-
93
-    """
94
-    # handle obvious types first - order of basic types determined by running
95
-    # full tests on nova project, resulting in the following counts:
96
-    # 572754 <type 'NoneType'>
97
-    # 460353 <type 'int'>
98
-    # 379632 <type 'unicode'>
99
-    # 274610 <type 'str'>
100
-    # 199918 <type 'dict'>
101
-    # 114200 <type 'datetime.datetime'>
102
-    #  51817 <type 'bool'>
103
-    #  26164 <type 'list'>
104
-    #   6491 <type 'float'>
105
-    #    283 <type 'tuple'>
106
-    #     19 <type 'long'>
107
-    if isinstance(value, _simple_types):
108
-        return value
109
-
110
-    if isinstance(value, datetime.datetime):
111
-        if convert_datetime:
112
-            return timeutils.strtime(value)
113
-        else:
114
-            return value
115
-
116
-    # value of itertools.count doesn't get caught by nasty_type_tests
117
-    # and results in infinite loop when list(value) is called.
118
-    if type(value) == itertools.count:
119
-        return six.text_type(value)
120
-
121
-    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
122
-    #              tests that raise an exception in a mocked method that
123
-    #              has a @wrap_exception with a notifier will fail. If
124
-    #              we up the dependency to 0.5.4 (when it is released) we
125
-    #              can remove this workaround.
126
-    if getattr(value, '__module__', None) == 'mox':
127
-        return 'mock'
128
-
129
-    if level > max_depth:
130
-        return '?'
131
-
132
-    # The try block may not be necessary after the class check above,
133
-    # but just in case ...
134
-    try:
135
-        recursive = functools.partial(to_primitive,
136
-                                      convert_instances=convert_instances,
137
-                                      convert_datetime=convert_datetime,
138
-                                      level=level,
139
-                                      max_depth=max_depth)
140
-        if isinstance(value, dict):
141
-            return dict((k, recursive(v)) for k, v in six.iteritems(value))
142
-        elif isinstance(value, (list, tuple)):
143
-            return [recursive(lv) for lv in value]
144
-
145
-        # It's not clear why xmlrpclib created their own DateTime type, but
146
-        # for our purposes, make it a datetime type which is explicitly
147
-        # handled
148
-        if isinstance(value, xmlrpclib.DateTime):
149
-            value = datetime.datetime(*tuple(value.timetuple())[:6])
150
-
151
-        if convert_datetime and isinstance(value, datetime.datetime):
152
-            return timeutils.strtime(value)
153
-        elif isinstance(value, gettextutils.Message):
154
-            return value.data
155
-        elif hasattr(value, 'iteritems'):
156
-            return recursive(dict(value.iteritems()), level=level + 1)
157
-        elif hasattr(value, '__iter__'):
158
-            return recursive(list(value))
159
-        elif convert_instances and hasattr(value, '__dict__'):
160
-            # Likely an instance of something. Watch for cycles.
161
-            # Ignore class member vars.
162
-            return recursive(value.__dict__, level=level + 1)
163
-        elif netaddr and isinstance(value, netaddr.IPAddress):
164
-            return six.text_type(value)
165
-        else:
166
-            if any(test(value) for test in _nasty_type_tests):
167
-                return six.text_type(value)
168
-            return value
169
-    except TypeError:
170
-        # Class objects are tricky since they may define something like
171
-        # __iter__ defined but it isn't callable as list().
172
-        return six.text_type(value)
173
-
174
-
175
-def dumps(value, default=to_primitive, **kwargs):
176
-    if is_simplejson:
177
-        kwargs['namedtuple_as_object'] = False
178
-    return json.dumps(value, default=default, **kwargs)
179
-
180
-
181
-def dump(obj, fp, *args, **kwargs):
182
-    if is_simplejson:
183
-        kwargs['namedtuple_as_object'] = False
184
-    return json.dump(obj, fp, *args, **kwargs)
185
-
186
-
187
-def loads(s, encoding='utf-8', **kwargs):
188
-    return json.loads(strutils.safe_decode(s, encoding), **kwargs)
189
-
190
-
191
-def load(fp, encoding='utf-8', **kwargs):
192
-    return json.load(codecs.getreader(encoding)(fp), **kwargs)
193
-
194
-
195
-try:
196
-    import anyjson
197
-except ImportError:
198
-    pass
199
-else:
200
-    anyjson._modules.append((__name__, 'dumps', TypeError,
201
-                                       'loads', ValueError, 'load'))
202
-    anyjson.force_implementation(__name__)
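
The JSON helpers moved to the graduated oslo.serialization library, so callers depend on it through requirements.txt rather than on this in-tree copy. A minimal sketch, assuming oslo.serialization is available; the payload is made-up sample data:

    # Sketch only: oslo.serialization provides dumps()/loads() wrappers that
    # run to_primitive() on values such as datetimes before encoding.
    import datetime

    from oslo_serialization import jsonutils

    payload = {'metric': 'cpu.idle_perc',                 # sample data
               'timestamp': datetime.datetime.utcnow()}

    body = jsonutils.dumps(payload)          # datetime is converted for us
    assert jsonutils.loads(body)['metric'] == 'cpu.idle_perc'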

+ 0
- 45
kiloeyes/openstack/common/local.py View File

@@ -1,45 +0,0 @@
1
-# Copyright 2011 OpenStack Foundation.
2
-# All Rights Reserved.
3
-#
4
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
5
-#    not use this file except in compliance with the License. You may obtain
6
-#    a copy of the License at
7
-#
8
-#         http://www.apache.org/licenses/LICENSE-2.0
9
-#
10
-#    Unless required by applicable law or agreed to in writing, software
11
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13
-#    License for the specific language governing permissions and limitations
14
-#    under the License.
15
-
16
-"""Local storage of variables using weak references"""
17
-
18
-import threading
19
-import weakref
20
-
21
-
22
-class WeakLocal(threading.local):
23
-    def __getattribute__(self, attr):
24
-        rval = super(WeakLocal, self).__getattribute__(attr)
25
-        if rval:
26
-            # NOTE(mikal): this bit is confusing. What is stored is a weak
27
-            # reference, not the value itself. We therefore need to lookup
28
-            # the weak reference and return the inner value here.
29
-            rval = rval()
30
-        return rval
31
-
32
-    def __setattr__(self, attr, value):
33
-        value = weakref.ref(value)
34
-        return super(WeakLocal, self).__setattr__(attr, value)
35
-
36
-
37
-# NOTE(mikal): the name "store" should be deprecated in the future
38
-store = WeakLocal()
39
-
40
-# A "weak" store uses weak references and allows an object to fall out of scope
41
-# when it falls out of scope in the code that uses the thread local storage. A
42
-# "strong" store will hold a reference to the object so that it never falls out
43
-# of scope.
44
-weak_store = WeakLocal()
45
-strong_store = threading.local()
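
This weak-reference thread-local store has no separate graduated library; the strong_store behaviour is simply the standard library's threading.local, as in this illustrative sketch:

    # Sketch only: plain threading.local gives per-thread attribute storage
    # equivalent to the strong_store object removed above.
    import threading

    store = threading.local()
    store.context = {'request_id': 'req-123'}    # example per-thread value

    # Other threads do not see this attribute; use a default when reading.
    current = getattr(store, 'context', None)
    print(current)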

+ 0
- 322
kiloeyes/openstack/common/lockutils.py View File

@@ -1,322 +0,0 @@
1
-# Copyright 2011 OpenStack Foundation.
2
-# All Rights Reserved.
3
-#
4
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
5
-#    not use this file except in compliance with the License. You may obtain
6
-#    a copy of the License at
7
-#
8
-#         http://www.apache.org/licenses/LICENSE-2.0
9
-#
10
-#    Unless required by applicable law or agreed to in writing, software
11
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13
-#    License for the specific language governing permissions and limitations
14
-#    under the License.
15
-
16
-import contextlib
17
-import errno
18
-import functools
19
-import logging
20
-import os
21
-import shutil
22
-import subprocess
23
-import sys
24
-import tempfile
25
-import threading
26
-import time
27
-import weakref
28
-
29
-from oslo_config import cfg
30
-
31
-from kiloeyes.openstack.common import fileutils
32
-from kiloeyes.openstack.common.gettextutils import _, _LE, _LI
33
-
34
-
35
-LOG = logging.getLogger(__name__)
36
-
37
-
38
-util_opts = [
39
-    cfg.BoolOpt('disable_process_locking', default=False,
40
-                help='Enables or disables inter-process locks.'),
41
-    cfg.StrOpt('lock_path',
42
-               default=os.environ.get("MONASCA_LOCK_PATH"),
43
-               help='Directory to use for lock files.')
44
-]
45
-
46
-
47
-CONF = cfg.CONF
48
-CONF.register_opts(util_opts)
49
-
50
-
51
-def set_defaults(lock_path):
52
-    cfg.set_defaults(util_opts, lock_path=lock_path)
53
-
54
-
55
-class _FileLock(object):
56
-    """Lock implementation which allows multiple locks, working around
57
-    issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does
58
-    not require any cleanup. Since the lock is always held on a file
59
-    descriptor rather than outside of the process, the lock gets dropped
60
-    automatically if the process crashes, even if __exit__ is not executed.
61
-
62
-    There are no guarantees regarding usage by multiple green threads in a
63
-    single process here. This lock works only between processes. Exclusive
64
-    access between local threads should be achieved using the semaphores
65
-    in the @synchronized decorator.
66
-
67
-    Note these locks are released when the descriptor is closed, so it's not
68
-    safe to close the file descriptor while another green thread holds the
69
-    lock. Just opening and closing the lock file can break synchronisation,
70
-    so lock files must be accessed only using this abstraction.
71
-    """
72
-
73
-    def __init__(self, name):
74
-        self.lockfile = None
75
-        self.fname = name
76
-
77
-    def acquire(self):
78
-        basedir = os.path.dirname(self.fname)
79
-
80
-        if not os.path.exists(basedir):
81
-            fileutils.ensure_tree(basedir)
82
-            LOG.info(_LI('Created lock path: %s'), basedir)
83
-
84
-        self.lockfile = open(self.fname, 'w')
85
-
86
-        while True:
87
-            try:
88
-                # Using non-blocking locks since green threads are not
89
-                # patched to deal with blocking locking calls.
90
-                # Also upon reading the MSDN docs for locking(), it seems
91
-                # to have a laughable 10 attempts "blocking" mechanism.
92
-                self.trylock()
93
-                LOG.debug('Got file lock "%s"', self.fname)
94
-                return True
95
-            except IOError as e:
96
-                if e.errno in (errno.EACCES, errno.EAGAIN):
97
-                    # external locks synchronise things like iptables
98
-                    # updates - give it some time to prevent busy spinning
99
-                    time.sleep(0.01)
100
-                else:
101
-                    raise threading.ThreadError(_("Unable to acquire lock on"
102
-                                                  " `%(filename)s` due to"
103
-                                                  " %(exception)s") %
104
-                                                {'filename': self.fname,
105
-                                                    'exception': e})
106
-
107
-    def __enter__(self):
108
-        self.acquire()
109
-        return self
110
-
111
-    def release(self):
112
-        try:
113
-            self.unlock()
114
-            self.lockfile.close()
115
-            LOG.debug('Released file lock "%s"', self.fname)
116
-        except IOError:
117
-            LOG.exception(_LE("Could not release the acquired lock `%s`"),
118
-                          self.fname)
119
-
120
-    def __exit__(self, exc_type, exc_val, exc_tb):
121
-        self.release()
122
-
123
-    def exists(self):
124
-        return os.path.exists(self.fname)
125
-
126
-    def trylock(self):
127
-        raise NotImplementedError()
128
-
129
-    def unlock(self):
130
-        raise NotImplementedError()
131
-
132
-
133
-class _WindowsLock(_FileLock):
134
-    def trylock(self):
135
-        msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)
136
-
137
-    def unlock(self):
138
-        msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)
139
-
140
-
141
-class _FcntlLock(_FileLock):
142
-    def trylock(self):
143
-        fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
144
-
145
-    def unlock(self):
146
-        fcntl.lockf(self.lockfile, fcntl.LOCK_UN)
147
-
148
-
149
-if os.name == 'nt':
150
-    import msvcrt
151
-    InterProcessLock = _WindowsLock
152
-else:
153
-    import fcntl
154
-    InterProcessLock = _FcntlLock
155
-
156
-_semaphores = weakref.WeakValueDictionary()
157
-_semaphores_lock = threading.Lock()
158
-
159
-
160
-def _get_lock_path(name, lock_file_prefix, lock_path=None):
161
-    # NOTE(mikal): the lock name cannot contain directory
162
-    # separators
163
-    name = name.replace(os.sep, '_')
164
-    if lock_file_prefix:
165
-        sep = '' if lock_file_prefix.endswith('-') else '-'
166
-        name = '%s%s%s' % (lock_file_prefix, sep, name)
167
-
168
-    local_lock_path = lock_path or CONF.lock_path
169
-
170
-    if not local_lock_path:
171
-        raise cfg.RequiredOptError('lock_path')
172
-
173
-    return os.path.join(local_lock_path, name)
174
-
175
-
176
-def external_lock(name, lock_file_prefix=None, lock_path=None):
177
-    LOG.debug('Attempting to grab external lock "%(lock)s"',
178
-              {'lock': name})
179
-
180
-    lock_file_path = _get_lock_path(name, lock_file_prefix, lock_path)
181
-
182
-    return InterProcessLock(lock_file_path)
183
-
184
-
185
-def remove_external_lock_file(name, lock_file_prefix=None):
186
-    """Remove an external lock file when it's not used anymore
187
-    This will be helpful when we have a lot of lock files
188
-    """
189
-    with internal_lock(name):
190
-        lock_file_path = _get_lock_path(name, lock_file_prefix)
191
-        try:
192
-            os.remove(lock_file_path)
193
-        except OSError:
194
-            LOG.info(_LI('Failed to remove file %(file)s'),
195
-                     {'file': lock_file_path})
196
-
197
-
198
-def internal_lock(name):
199
-    with _semaphores_lock:
200
-        try:
201
-            sem = _semaphores[name]
202
-        except KeyError:
203
-            sem = threading.Semaphore()
204
-            _semaphores[name] = sem
205
-
206
-    LOG.debug('Got semaphore "%(lock)s"', {'lock': name})
207
-    return sem
208
-
209
-
210
-@contextlib.contextmanager
211
-def lock(name, lock_file_prefix=None, external=False, lock_path=None):
212
-    """Context based lock
213
-
214
-    This function yields a `threading.Semaphore` instance (if we don't use
215
-    eventlet.monkey_patch(), else `semaphore.Semaphore`) unless external is
216
-    True, in which case, it'll yield an InterProcessLock instance.
217
-
218
-    :param lock_file_prefix: The lock_file_prefix argument is used to provide
219
-      lock files on disk with a meaningful prefix.
220
-
221
-    :param external: The external keyword argument denotes whether this lock
222
-      should work across multiple processes. This means that if two different
223
-      workers both run a method decorated with @synchronized('mylock',
224
-      external=True), only one of them will execute at a time.
225
-    """
226
-    int_lock = internal_lock(name)
227
-    with int_lock:
228
-        if external and not CONF.disable_process_locking:
229
-            ext_lock = external_lock(name, lock_file_prefix, lock_path)
230
-            with ext_lock:
231
-                yield ext_lock
232
-        else:
233
-            yield int_lock
234
-    LOG.debug('Released semaphore "%(lock)s"', {'lock': name})
235
-
236
-
237
-def synchronized(name, lock_file_prefix=None, external=False, lock_path=None):
238
-    """Synchronization decorator.
239
-
240
-    Decorating a method like so::
241
-
242
-        @synchronized('mylock')
243
-        def foo(self, *args):
244
-           ...
245
-
246
-    ensures that only one thread will execute the foo method at a time.
247
-
248
-    Different methods can share the same lock::
249
-
250
-        @synchronized('mylock')
251
-        def foo(self, *args):
252
-           ...
253
-
254
-        @synchronized('mylock')
255
-        def bar(self, *args):
256
-           ...
257
-
258
-    This way only one of either foo or bar can be executing at a time.
259
-    """
260
-
261
-    def wrap(f):
262
-        @functools.wraps(f)
263
-        def inner(*args, **kwargs):
264
-            try:
265
-                with lock(name, lock_file_prefix, external, lock_path):
266
-                    LOG.debug('Got semaphore / lock "%(function)s"',
267
-                              {'function': f.__name__})
268
-                    return f(*args, **kwargs)
269
-            finally:
270
-                LOG.debug('Semaphore / lock released "%(function)s"',
271
-                          {'function': f.__name__})
272
-        return inner
273
-    return wrap
274
-
275
-
276
-def synchronized_with_prefix(lock_file_prefix):
277
-    """Partial object generator for the synchronization decorator.
278
-
279
-    Redefine @synchronized in each project like so::
280
-
281
-        (in nova/utils.py)
282
-        from nova.openstack.common import lockutils
283
-
284
-        synchronized = lockutils.synchronized_with_prefix('nova-')
285
-
286
-
287
-        (in nova/foo.py)
288
-        from nova import utils
289
-
290
-        @utils.synchronized('mylock')
291
-        def bar(self, *args):
292
-           ...
293
-
294
-    The lock_file_prefix argument is used to provide lock files on disk with a
295
-    meaningful prefix.
296
-    """
297
-
298
-    return functools.partial(synchronized, lock_file_prefix=lock_file_prefix)
299
-
300
-
301
-def main(argv):
302
-    """Create a dir for locks and pass it to command from arguments
303
-
304
-    If you run this:
305
-    python -m openstack.common.lockutils python setup.py testr <etc>
306
-
307
-    a temporary directory will be created for all your locks and passed to all
308
-    your tests in an environment variable. The temporary dir will be deleted
309
-    afterwards and the return value will be preserved.
310
-    """
311
-
312
-    lock_dir = tempfile.mkdtemp()
313
-    os.environ["MONASCA_LOCK_PATH"] = lock_dir
314
-    try:
315
-        ret_val = subprocess.call(argv[1:])
316
-    finally:
317
-        shutil.rmtree(lock_dir, ignore_errors=True)
318
-    return ret_val
319
-
320
-
321
-if __name__ == '__main__':
322
-    sys.exit(main(sys.argv))
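
The locking utilities graduated into the oslo.concurrency library, which exposes the same lock()/synchronized() interface. A minimal sketch, assuming oslo.concurrency from requirements.txt; the lock name, prefix and lock_path are illustrative:

    # Sketch only: oslo.concurrency replaces the incubator lockutils copy.
    from oslo_concurrency import lockutils

    # Project-wide prefix so lock files on disk are easy to recognise.
    synchronized = lockutils.synchronized_with_prefix('kiloeyes-')

    @synchronized('update-alarms', external=True, lock_path='/tmp')
    def update_alarms():
        # With external=True only one process runs this block at a time.
        pass

    update_alarms()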

+ 0
- 713
kiloeyes/openstack/common/log.py View File

@@ -1,713 +0,0 @@
1
-# Copyright 2011 OpenStack Foundation.
2
-# Copyright 2010 United States Government as represented by the
3
-# Administrator of the National Aeronautics and Space Administration.
4
-# All Rights Reserved.
5
-#
6
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
7
-#    not use this file except in compliance with the License. You may obtain
8
-#    a copy of the License at
9
-#
10
-#         http://www.apache.org/licenses/LICENSE-2.0
11
-#
12
-#    Unless required by applicable law or agreed to in writing, software
13
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
15
-#    License for the specific language governing permissions and limitations
16
-#    under the License.
17
-
18
-"""OpenStack logging handler.
19
-
20
-This module adds to logging functionality by adding the option to specify
21
-a context object when calling the various log methods.  If the context object
22
-is not specified, default formatting is used. Additionally, an instance uuid
23
-may be passed as part of the log message, which is intended to make it easier
24
-for admins to find messages related to a specific instance.
25
-
26
-It also allows setting of formatting information through conf.
27
-
28
-"""
29
-
30
-import inspect
31
-import itertools
32
-import logging
33
-import logging.config
34
-import logging.handlers
35
-import os
36
-import socket
37
-import sys
38
-import traceback
39
-
40
-from oslo_config import cfg
41
-import six
42
-from six import moves
43
-
44
-_PY26 = sys.version_info[0:2] == (2, 6)
45
-
46
-from kiloeyes.openstack.common.gettextutils import _
47
-from kiloeyes.openstack.common import importutils
48
-from kiloeyes.openstack.common import jsonutils
49
-from kiloeyes.openstack.common import local
50
-# NOTE(flaper87): Pls, remove when graduating this module
51
-# from the incubator.
52
-from kiloeyes.openstack.common.strutils import mask_password  # noqa
53
-
54
-
55
-_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
56
-
57
-
58
-common_cli_opts = [
59
-    cfg.BoolOpt('debug',
60
-                short='d',
61
-                default=False,
62
-                help='Print debugging output (set logging level to '
63
-                     'DEBUG instead of default WARNING level).'),
64
-    cfg.BoolOpt('verbose',
65
-                short='v',
66
-                default=False,
67
-                help='Print more verbose output (set logging level to '
68
-                     'INFO instead of default WARNING level).'),
69
-]
70
-
71
-logging_cli_opts = [
72
-    cfg.StrOpt('log-config-append',
73
-               metavar='PATH',
74
-               deprecated_name='log-config',
75
-               help='The name of a logging configuration file. This file '
76
-                    'is appended to any existing logging configuration '
77
-                    'files. For details about logging configuration files, '
78
-                    'see the Python logging module documentation.'),
79
-    cfg.StrOpt('log-format',
80
-               metavar='FORMAT',
81
-               help='DEPRECATED. '
82
-                    'A logging.Formatter log message format string which may '
83
-                    'use any of the available logging.LogRecord attributes. '
84
-                    'This option is deprecated.  Please use '
85
-                    'logging_context_format_string and '
86
-                    'logging_default_format_string instead.'),
87
-    cfg.StrOpt('log-date-format',
88
-               default=_DEFAULT_LOG_DATE_FORMAT,
89
-               metavar='DATE_FORMAT',
90
-               help='Format string for %%(asctime)s in log records. '
91
-                    'Default: %(default)s .'),
92
-    cfg.StrOpt('log-file',
93
-               metavar='PATH',
94
-               deprecated_name='logfile',
95
-               help='(Optional) Name of log file to output to. '
96
-                    'If no default is set, logging will go to stdout.'),
97
-    cfg.StrOpt('log-dir',
98
-               deprecated_name='logdir',
99
-               help='(Optional) The base directory used for relative '
100
-                    '--log-file paths.'),
101
-    cfg.BoolOpt('use-syslog',
102
-                default=False,
103
-                help='Use syslog for logging. '
104
-                     'Existing syslog format is DEPRECATED during I, '
105
-                     'and will change in J to honor RFC5424.'),
106
-    cfg.BoolOpt('use-syslog-rfc-format',
107
-                # TODO(bogdando) remove or use True after existing
108
-                #    syslog format deprecation in J
109
-                default=False,
110
-                help='(Optional) Enables or disables syslog rfc5424 format '
111
-                     'for logging. If enabled, prefixes the MSG part of the '
112
-                     'syslog message with APP-NAME (RFC5424). The '
113
-                     'format without the APP-NAME is deprecated in I, '
114
-                     'and will be removed in J.'),
115
-    cfg.StrOpt('syslog-log-facility',
116
-               default='LOG_USER',
117
-               help='Syslog facility to receive log lines.')
118
-]
119
-
120
-generic_log_opts = [
121
-    cfg.BoolOpt('use_stderr',
122
-                default=True,
123
-                help='Log output to standard error.')
124
-]
125
-
126
-DEFAULT_LOG_LEVELS = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN',
127
-                      'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO',
128
-                      'oslo.messaging=INFO', 'iso8601=WARN',
129
-                      'requests.packages.urllib3.connectionpool=WARN',
130
-                      'urllib3.connectionpool=WARN', 'websocket=WARN',
131
-                      "keystonemiddleware=WARN", "routes.middleware=WARN",
132
-                      "stevedore=WARN"]
133
-
134
-log_opts = [
135
-    cfg.StrOpt('logging_context_format_string',
136
-               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
137
-                       '%(name)s [%(request_id)s %(user_identity)s] '
138
-                       '%(instance)s%(message)s',
139
-               help='Format string to use for log messages with context.'),
140
-    cfg.StrOpt('logging_default_format_string',
141
-               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
142
-                       '%(name)s [-] %(instance)s%(message)s',
143
-               help='Format string to use for log messages without context.'),
144
-    cfg.StrOpt('logging_debug_format_suffix',
145
-               default='%(funcName)s %(pathname)s:%(lineno)d',
146
-               help='Data to append to log format when level is DEBUG.'),
147
-    cfg.StrOpt('logging_exception_prefix',
148
-               default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
149
-               '%(instance)s',
150
-               help='Prefix each line of exception output with this format.'),
151
-    cfg.ListOpt('default_log_levels',
152
-                default=DEFAULT_LOG_LEVELS,
153
-                help='List of logger=LEVEL pairs.'),
154
-    cfg.BoolOpt('publish_errors',
155
-                default=False,
156
-                help='Enables or disables publication of error events.'),
157
-    cfg.BoolOpt('fatal_deprecations',
158
-                default=False,
159
-                help='Enables or disables fatal status of deprecations.'),
160
-
161
-    # NOTE(mikal): there are two options here because sometimes we are handed
162
-    # a full instance (and could include more information), and other times we
163
-    # are just handed a UUID for the instance.
164
-    cfg.StrOpt('instance_format',
165
-               default='[instance: %(uuid)s] ',
166
-               help='The format for an instance that is passed with the log '
167
-                    'message.'),
168
-    cfg.StrOpt('instance_uuid_format',
169
-               default='[instance: %(uuid)s] ',
170
-               help='The format for an instance UUID that is passed with the '
171
-                    'log message.'),
172
-]
173
-
174
-CONF = cfg.CONF
175
-CONF.register_cli_opts(common_cli_opts)
176
-CONF.register_cli_opts(logging_cli_opts)
177
-CONF.register_opts(generic_log_opts)
178
-CONF.register_opts(log_opts)
179
-
180
-# our new audit level
181
-# NOTE(jkoelker) Since we synthesized an audit level, make the logging
182
-#                module aware of it so it acts like other levels.
183
-logging.AUDIT = logging.INFO + 1
184
-logging.addLevelName(logging.AUDIT, 'AUDIT')
185
-
186
-
187
-try:
188
-    NullHandler = logging.NullHandler
189
-except AttributeError:  # NOTE(jkoelker) NullHandler added in Python 2.7
190
-    class NullHandler(logging.Handler):
191
-        def handle(self, record):
192
-            pass
193
-
194
-        def emit(self, record):
195
-            pass
196
-
197
-        def createLock(self):
198
-            self.lock = None
199
-
200
-
201
-def _dictify_context(context):
202
-    if context is None:
203
-        return None
204
-    if not isinstance(context, dict) and getattr(context, 'to_dict', None):
205
-        context = context.to_dict()
206
-    return context
207
-
208
-
209
-def _get_binary_name():
210
-    return os.path.basename(inspect.stack()[-1][1])
211
-
212
-
213
-def _get_log_file_path(binary=None):
214
-    logfile = CONF.log_file
215
-    logdir = CONF.log_dir
216
-
217
-    if logfile and not logdir:
218
-        return logfile
219
-
220
-    if logfile and logdir:
221
-        return os.path.join(logdir, logfile)
222
-
223
-    if logdir:
224
-        binary = binary or _get_binary_name()
225
-        return '%s.log' % (os.path.join(logdir, binary),)
226
-
227
-    return None
228
-
229
-
230
-class BaseLoggerAdapter(logging.LoggerAdapter):
231
-
232
-    def audit(self, msg, *args, **kwargs):
233
-        self.log(logging.AUDIT, msg, *args, **kwargs)
234
-
235
-    def isEnabledFor(self, level):
236
-        if _PY26:
237
-            # This method was added in python 2.7 (and it does the exact
238
-            # same logic, so we need to do the exact same logic so that
239
-            # python 2.6 has this capability as well).
240
-            return self.logger.isEnabledFor(level)
241
-        else:
242
-            return super(BaseLoggerAdapter, self).isEnabledFor(level)
243
-
244
-
245
-class LazyAdapter(BaseLoggerAdapter):
246
-    def __init__(self, name='unknown', version='unknown'):
247
-        self._logger = None
248
-        self.extra = {}
249
-        self.name = name
250
-        self.version = version
251
-
252
-    @property
253
-    def logger(self):
254
-        if not self._logger:
255
-            self._logger = getLogger(self.name, self.version)
256
-            if six.PY3:
257
-                # In Python 3, the code fails because the 'manager' attribute
258
-                # cannot be found when using a LoggerAdapter as the
259
-                # underlying logger. Work around this issue.
260
-                self._logger.manager = self._logger.logger.manager
261
-        return self._logger
262
-
263
-
264
-class ContextAdapter(BaseLoggerAdapter):
265
-    warn = logging.LoggerAdapter.warning
266
-
267
-    def __init__(self, logger, project_name, version_string):
268
-        self.logger = logger
269
-        self.project = project_name
270
-        self.version = version_string
271
-        self._deprecated_messages_sent = dict()
272
-
273
-    @property
274
-    def handlers(self):
275
-        return self.logger.handlers
276
-
277
-    def deprecated(self, msg, *args, **kwargs):
278
-        """Call this method when a deprecated feature is used.
279
-
280
-        If the system is configured for fatal deprecations then the message
281
-        is logged at the 'critical' level and :class:`DeprecatedConfig` will
282
-        be raised.
283
-
284
-        Otherwise, the message will be logged (once) at the 'warn' level.
285
-
286
-        :raises: :class:`DeprecatedConfig` if the system is configured for
287
-                 fatal deprecations.
288
-
289
-        """
290
-        stdmsg = _("Deprecated: %s") % msg
291
-        if CONF.fatal_deprecations:
292
-            self.critical(stdmsg, *args, **kwargs)
293
-            raise DeprecatedConfig(msg=stdmsg)
294
-
295
-        # Using a list because a tuple with dict can't be stored in a set.
296
-        sent_args = self._deprecated_messages_sent.setdefault(msg, list())
297
-
298
-        if args in sent_args:
299
-            # Already logged this message, so don't log it again.
300
-            return
301
-
302
-        sent_args.append(args)
303
-        self.warn(stdmsg, *args, **kwargs)
304
-
305
-    def process(self, msg, kwargs):
306
-        # NOTE(jecarey): If msg is not unicode, coerce it into unicode
307
-        #                before it can get to the python logging and
308
-        #                possibly cause string encoding trouble
309
-        if not isinstance(msg, six.text_type):
310
-            msg = six.text_type(msg)
311
-
312
-        if 'extra' not in kwargs:
313
-            kwargs['extra'] = {}
314
-        extra = kwargs['extra']
315
-
316
-        context = kwargs.pop('context', None)
317
-        if not context:
318
-            context = getattr(local.store, 'context', None)
319
-        if context:
320
-            extra.update(_dictify_context(context))
321
-
322
-        instance = kwargs.pop('instance', None)
323
-        instance_uuid = (extra.get('instance_uuid') or
324
-                         kwargs.pop('instance_uuid', None))
325
-        instance_extra = ''
326
-        if instance:
327
-            instance_extra = CONF.instance_format % instance
328
-        elif instance_uuid:
329
-            instance_extra = (CONF.instance_uuid_format
330
-                              % {'uuid': instance_uuid})
331
-        extra['instance'] = instance_extra
332
-
333
-        extra.setdefault('user_identity', kwargs.pop('user_identity', None))
334
-
335
-        extra['project'] = self.project
336
-        extra['version'] = self.version
337
-        extra['extra'] = extra.copy()
338
-        return msg, kwargs
339
-
340
-
341
-class JSONFormatter(logging.Formatter):
342
-    def __init__(self, fmt=None, datefmt=None):
343
-        # NOTE(jkoelker) we ignore the fmt argument, but it's still there
344
-        #                since logging.config.fileConfig passes it.
345
-        self.datefmt = datefmt
346
-
347
-    def formatException(self, ei, strip_newlines=True):
348
-        lines = traceback.format_exception(*ei)
349
-        if strip_newlines:
350
-            lines = [moves.filter(
351
-                lambda x: x,
352
-                line.rstrip().splitlines()) for line in lines]
353
-            lines = list(itertools.chain(*lines))
354
-        return lines
355
-
356
-    def format(self, record):
357
-        message = {'message': record.getMessage(),
358
-                   'asctime': self.formatTime(record, self.datefmt),
359
-                   'name': record.name,
360
-                   'msg': record.msg,
361
-                   'args': record.args,
362
-                   'levelname': record.levelname,
363
-                   'levelno': record.levelno,
364
-                   'pathname': record.pathname,
365
-                   'filename': record.filename,
366
-                   'module': record.module,
367
-                   'lineno': record.lineno,
368
-                   'funcname': record.funcName,
369
-                   'created': record.created,
370
-                   'msecs': record.msecs,
371
-                   'relative_created': record.relativeCreated,
372
-                   'thread': record.thread,
373
-                   'thread_name': record.threadName,
374
-                   'process_name': record.processName,
375
-                   'process': record.process,
376
-                   'traceback': None}
377
-
378
-        if hasattr(record, 'extra'):
379
-            message['extra'] = record.extra
380
-
381
-        if record.exc_info:
382
-            message['traceback'] = self.formatException(record.exc_info)
383
-
384
-        return jsonutils.dumps(message)
385
-
386
-
387
-def _create_logging_excepthook(product_name):
388
-    def logging_excepthook(exc_type, value, tb):
389
-        extra = {'exc_info': (exc_type, value, tb)}
390
-        getLogger(product_name).critical(
391
-            "".join(traceback.format_exception_only(exc_type, value)),
392
-            **extra)
393
-    return logging_excepthook
394
-
395
-
396
-class LogConfigError(Exception):
397
-
398
-    message = _('Error loading logging config %(log_config)s: %(err_msg)s')
399
-
400
-    def __init__(self, log_config, err_msg):
401
-        self.log_config = log_config
402
-        self.err_msg = err_msg
403
-
404
-    def __str__(self):
405
-        return self.message % dict(log_config=self.log_config,
406
-                                   err_msg=self.err_msg)
407
-
408
-
409
-def _load_log_config(log_config_append):
410
-    try:
411
-        logging.config.fileConfig(log_config_append,
412
-                                  disable_existing_loggers=False)
413
-    except (moves.configparser.Error, KeyError) as exc:
414
-        raise LogConfigError(log_config_append, six.text_type(exc))
415
-
416
-
417
-def setup(product_name, version='unknown'):
418
-    """Setup logging."""
419
-    if CONF.log_config_append:
420
-        _load_log_config(CONF.log_config_append)
421
-    else:
422
-        _setup_logging_from_conf(product_name, version)
423
-    sys.excepthook = _create_logging_excepthook(product_name)
424
-
425
-
426
-def set_defaults(logging_context_format_string=None,
427
-                 default_log_levels=None):
428
-    # Just in case the caller is not setting the
429
-    # default_log_level. This is insurance because
430
-    # we introduced the default_log_level parameter
431
-    # later in a backwards in-compatible change
432
-    if default_log_levels is not None:
433
-        cfg.set_defaults(
434
-            log_opts,
435
-            default_log_levels=default_log_levels)
436
-    if logging_context_format_string is not None:
437
-        cfg.set_defaults(
438
-            log_opts,
439
-            logging_context_format_string=logging_context_format_string)
440
-
441
-
442
-def _find_facility_from_conf():
443
-    facility_names = logging.handlers.SysLogHandler.facility_names
444
-    facility = getattr(logging.handlers.SysLogHandler,
445
-                       CONF.syslog_log_facility,
446
-                       None)
447
-
448
-    if facility is None and CONF.syslog_log_facility in facility_names:
449
-        facility = facility_names.get(CONF.syslog_log_facility)
450
-
451
-    if facility is None:
452
-        valid_facilities = facility_names.keys()
453
-        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
454
-                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
455
-                  'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
456
-                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
457
-                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
458
-        valid_facilities.extend(consts)
459
-        raise TypeError(_('syslog facility must be one of: %s') %
460
-                        ', '.join("'%s'" % fac
461
-                                  for fac in valid_facilities))
462
-
463
-    return facility
464
-
465
-
466
-class RFCSysLogHandler(logging.handlers.SysLogHandler):
467
-    def __init__(self, *args, **kwargs):
468
-        self.binary_name = _get_binary_name()
469
-        # Do not use super() unless type(logging.handlers.SysLogHandler)
470
-        #  is 'type' (Python 2.7).
471
-        # Use old style calls, if the type is 'classobj' (Python 2.6)
472
-        logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)
473
-
474
-    def format(self, record):
475
-        # Do not use super() unless type(logging.handlers.SysLogHandler)
476
-        #  is 'type' (Python 2.7).
477
-        # Use old style calls, if the type is 'classobj' (Python 2.6)
478
-        msg = logging.handlers.SysLogHandler.format(self, record)
479
-        msg = self.binary_name + ' ' + msg
480
-        return msg
481
-
482
-
483
-def _setup_logging_from_conf(project, version):
484
-    log_root = getLogger(None).logger
485
-    for handler in log_root.handlers:
486
-        log_root.removeHandler(handler)
487
-
488
-    logpath = _get_log_file_path()
489
-    if logpath:
490
-        filelog = logging.handlers.WatchedFileHandler(logpath)
491
-        log_root.addHandler(filelog)
492
-
493
-    if CONF.use_stderr:
494
-        streamlog = ColorHandler()
495
-        log_root.addHandler(streamlog)
496
-
497
-    elif not logpath:
498
-        # pass sys.stdout as a positional argument
499
-        # python2.6 calls the argument strm, in 2.7 it's stream
500
-        streamlog = logging.StreamHandler(sys.stdout)
501
-        log_root.addHandler(streamlog)
502
-
503
-    if CONF.publish_errors:
504
-        try:
505
-            handler = importutils.import_object(
506
-                "kiloeyes.openstack.common.log_handler.PublishErrorsHandler",
507
-                logging.ERROR)
508
-        except ImportError:
509
-            handler = importutils.import_object(
510
-                "oslo.messaging.notify.log_handler.PublishErrorsHandler",
511
-                logging.ERROR)
512
-        log_root.addHandler(handler)
513
-
514
-    datefmt = CONF.log_date_format
515
-    for handler in log_root.handlers:
516
-        # NOTE(alaski): CONF.log_format overrides everything currently.  This
517
-        # should be deprecated in favor of context aware formatting.
518
-        if CONF.log_format:
519
-            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
520
-                                                   datefmt=datefmt))
521
-            log_root.info('Deprecated: log_format is now deprecated and will '
522
-                          'be removed in the next release')
523
-        else:
524
-            handler.setFormatter(ContextFormatter(project=project,
525
-                                                  version=version,
526
-                                                  datefmt=datefmt))
527
-
528
-    if CONF.debug:
529
-        log_root.setLevel(logging.DEBUG)
530
-    elif CONF.verbose:
531
-        log_root.setLevel(logging.INFO)
532
-    else:
533
-        log_root.setLevel(logging.WARNING)
534
-
535
-    for pair in CONF.default_log_levels:
536
-        mod, _sep, level_name = pair.partition('=')
537
-        logger = logging.getLogger(mod)
538
-        # NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name
539
-        # to integer code.
540
-        if sys.version_info < (2, 7):
541
-            level = logging.getLevelName(level_name)
542
-            logger.setLevel(level)
543
-        else:
544
-            logger.setLevel(level_name)
545
-
546
-    if CONF.use_syslog:
547
-        try:
548
-            facility = _find_facility_from_conf()
549
-            # TODO(bogdando) use the format provided by RFCSysLogHandler
550
-            #   after existing syslog format deprecation in J
551
-            if CONF.use_syslog_rfc_format:
552
-                syslog = RFCSysLogHandler(facility=facility)
553
-            else:
554
-                syslog = logging.handlers.SysLogHandler(facility=facility)
555
-            log_root.addHandler(syslog)
556
-        except socket.error:
557
-            log_root.error('Unable to add syslog handler. Verify that syslog '
558
-                           'is running.')
559
-
560
-
561
-_loggers = {}
562
-
563
-
564
-def getLogger(name='unknown', version='unknown'):
565
-    if name not in _loggers:
566
-        _loggers[name] = ContextAdapter(logging.getLogger(name),
567
-                                        name,
568
-                                        version)
569
-    return _loggers[name]
570