Oslo incubator updates
Change-Id: I9559d8a6d59477f6b5ba3f82ab9ecf9b71b75f70

@@ -12,7 +12,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from octavia.openstack.common import excutils
+from oslo.utils import excutils
 
 
 class AmphoraDriverError(Exception):
@@ -116,4 +116,4 @@ class HealthMonitorProvisioningError(ProvisioningErrors):
 
 class NodeProvisioningError(ProvisioningErrors):
 
     message = _('couldn\'t provision Node')

@@ -21,10 +21,10 @@ from barbicanclient import client as barbican_client
 from keystoneclient.auth.identity import v3 as keystone_client
 from keystoneclient import session
 from oslo.config import cfg
+from oslo.utils import excutils
 
 from octavia.certificates.common import cert
-from octavia.openstack.common import excutils
-from octavia.openstack.common import gettextutils
+from octavia.i18n import _LE
 from octavia.openstack.common import log as logging
 
 
@@ -39,7 +39,7 @@ class BarbicanCert(cert.Cert):
     def __init__(self, cert_container):
         if not isinstance(cert_container,
                           barbican_client.containers.CertificateContainer):
-            raise TypeError(gettextutils._LE(
+            raise TypeError(_LE(
                 "Retrieved Barbican Container is not of the correct type "
                 "(certificate)."))
         self._cert_container = cert_container
@@ -79,7 +79,7 @@ class BarbicanKeystoneAuth(object):
                cls._keystone_session = session.Session(auth=kc)
            except Exception as e:
                with excutils.save_and_reraise_exception():
-                    LOG.error(gettextutils._LE(
+                    LOG.error(_LE(
                        "Error creating Keystone session: %s"), e)
        return cls._keystone_session
 
@@ -97,6 +97,6 @@ class BarbicanKeystoneAuth(object):
                )
            except Exception as e:
                with excutils.save_and_reraise_exception():
-                    LOG.error(gettextutils._LE(
+                    LOG.error(_LE(
                        "Error creating Barbican client: %s"), e)
        return cls._barbican_client

@@ -14,8 +14,7 @@
 # under the License.
 
 from oslo.config import cfg
-
-from octavia.openstack.common import importutils
+from oslo.utils import importutils
 
 certgen_opts = [
     cfg.StrOpt('cert_generator_class',

@@ -20,7 +20,7 @@ from oslo.config import cfg
 
 from octavia.certificates.generator import cert_gen
 from octavia.common import exceptions
-from octavia.openstack.common import gettextutils
+from octavia.i18n import _LE, _LI
 from octavia.openstack.common import log as logging
 
 LOG = logging.getLogger(__name__)
@@ -53,11 +53,11 @@ class LocalCertGenerator(cert_gen.CertGenerator):
         :return: Signed certificate
         :raises Exception: if certificate signing fails
         """
-        LOG.info(gettextutils._LI(
+        LOG.info(_LI(
             "Signing a certificate request using pyOpenSSL locally."
         ))
         if not ca_cert:
-            LOG.info(gettextutils._LI("Using CA Certificate from config."))
+            LOG.info(_LI("Using CA Certificate from config."))
             try:
                 ca_cert = open(CONF.certificates.ca_certificate).read()
             except IOError:
@@ -66,7 +66,7 @@ class LocalCertGenerator(cert_gen.CertGenerator):
                     .format(CONF.certificates.ca_certificate)
                 )
         if not ca_key:
-            LOG.info(gettextutils._LI("Using CA Private Key from config."))
+            LOG.info(_LI("Using CA Private Key from config."))
             try:
                 ca_key = open(CONF.certificates.ca_private_key).read()
             except IOError:
@@ -77,11 +77,11 @@ class LocalCertGenerator(cert_gen.CertGenerator):
         if not ca_key_pass:
             ca_key_pass = CONF.certificates.ca_private_key_passphrase
             if ca_key_pass:
-                LOG.info(gettextutils._LI(
+                LOG.info(_LI(
                     "Using CA Private Key Passphrase from config."
                 ))
             else:
-                LOG.info(gettextutils._LI(
+                LOG.info(_LI(
                     "No Passphrase found for CA Private Key, not using one."
                 ))
         if not ca_digest:
@@ -104,5 +104,5 @@ class LocalCertGenerator(cert_gen.CertGenerator):
 
             return crypto.dump_certificate(crypto.FILETYPE_PEM, new_cert)
         except Exception as e:
-            LOG.error(gettextutils._LE("Unable to sign certificate."))
+            LOG.error(_LE("Unable to sign certificate."))
             raise exceptions.CertificateGenerationException(e)

@@ -14,8 +14,7 @@
 # under the License.
 
 from oslo.config import cfg
-
-from octavia.openstack.common import importutils
+from oslo.utils import importutils
 
 certmgr_opts = [
     cfg.StrOpt('cert_manager_class',

@@ -17,11 +17,11 @@
 Cert manager implementation for Barbican
 """
 from oslo.config import cfg
+from oslo.utils import excutils
 
 from octavia.certificates.common import barbican as barbican_common
 from octavia.certificates.manager import cert_mgr
-from octavia.openstack.common import excutils
-from octavia.openstack.common import gettextutils
+from octavia.i18n import _LE, _LI, _LW
 from octavia.openstack.common import log as logging
 
 
@@ -50,7 +50,7 @@ class BarbicanCertManager(cert_mgr.CertManager):
         """
         connection = barbican_common.BarbicanKeystoneAuth.get_barbican_client()
 
-        LOG.info(gettextutils._LI(
+        LOG.info(_LI(
             "Storing certificate container '{0}' in Barbican."
         ).format(name))
 
@@ -99,16 +99,16 @@ class BarbicanCertManager(cert_mgr.CertManager):
                     old_ref = i.secret_ref
                     try:
                         i.delete()
-                        LOG.info(gettextutils._LI(
+                        LOG.info(_LI(
                             "Deleted secret {0} ({1}) during rollback."
                         ).format(i.name, old_ref))
                     except Exception:
-                        LOG.warning(gettextutils._LW(
+                        LOG.warning(_LW(
                             "Failed to delete {0} ({1}) during rollback. This "
                             "might not be a problem."
                         ).format(i.name, old_ref))
             with excutils.save_and_reraise_exception():
-                LOG.error(gettextutils._LE(
+                LOG.error(_LE(
                     "Error storing certificate data: {0}"
                 ).format(str(e)))
 
@@ -128,7 +128,7 @@ class BarbicanCertManager(cert_mgr.CertManager):
         """
         connection = barbican_common.BarbicanKeystoneAuth.get_barbican_client()
 
-        LOG.info(gettextutils._LI(
+        LOG.info(_LI(
             "Loading certificate container {0} from Barbican."
         ).format(cert_ref))
         try:
@@ -145,7 +145,7 @@ class BarbicanCertManager(cert_mgr.CertManager):
             return barbican_common.BarbicanCert(cert_container)
         except Exception as e:
             with excutils.save_and_reraise_exception():
-                LOG.error(gettextutils._LE(
+                LOG.error(_LE(
                     "Error getting {0}: {1}"
                 ).format(cert_ref, str(e)))
 
@@ -162,7 +162,7 @@ class BarbicanCertManager(cert_mgr.CertManager):
         """
         connection = barbican_common.BarbicanKeystoneAuth.get_barbican_client()
 
-        LOG.info(gettextutils._LI(
+        LOG.info(_LI(
             "Deregistering as a consumer of {0} in Barbican."
         ).format(cert_ref))
         try:
@@ -173,7 +173,7 @@ class BarbicanCertManager(cert_mgr.CertManager):
             )
         except Exception as e:
             with excutils.save_and_reraise_exception():
-                LOG.error(gettextutils._LE(
+                LOG.error(_LE(
                     "Error deregistering as a consumer of {0}: {1}"
                 ).format(cert_ref, str(e)))
 
@@ -186,7 +186,7 @@ class BarbicanCertManager(cert_mgr.CertManager):
         """
         connection = barbican_common.BarbicanKeystoneAuth.get_barbican_client()
 
-        LOG.info(gettextutils._LI(
+        LOG.info(_LI(
             "Recursively deleting certificate container {0} from Barbican."
         ).format(cert_ref))
         try:
@@ -200,6 +200,6 @@ class BarbicanCertManager(cert_mgr.CertManager):
             certificate_container.delete()
         except Exception as e:
             with excutils.save_and_reraise_exception():
-                LOG.error(gettextutils._LE(
+                LOG.error(_LE(
                     "Error recursively deleting container {0}: {1}"
                 ).format(cert_ref, str(e)))

@@ -20,7 +20,7 @@ from oslo.config import cfg
 from octavia.certificates.common import local as local_common
 from octavia.certificates.manager import cert_mgr
 from octavia.common import exceptions
-from octavia.openstack.common import gettextutils
+from octavia.i18n import _LE, _LI
 from octavia.openstack.common import log as logging
 
 LOG = logging.getLogger(__name__)
@@ -50,7 +50,7 @@ class LocalCertManager(cert_mgr.CertManager):
         cert_ref = str(uuid.uuid4())
         filename_base = os.path.join(CONF.certificates.storage_path, cert_ref)
 
-        LOG.info(gettextutils._LI(
+        LOG.info(_LI(
             "Storing certificate data on the local filesystem."
         ))
         try:
@@ -73,7 +73,7 @@ class LocalCertManager(cert_mgr.CertManager):
                 with open(filename_pkp, 'w') as pass_file:
                     pass_file.write(private_key_passphrase)
         except IOError as ioe:
-            LOG.error(gettextutils._LE("Failed to store certificate."))
+            LOG.error(_LE("Failed to store certificate."))
             raise exceptions.CertificateStorageException(message=ioe.message)
 
         return cert_ref
@@ -88,7 +88,7 @@ class LocalCertManager(cert_mgr.CertManager):
                 certificate data
         :raises CertificateStorageException: if certificate retrieval fails
         """
-        LOG.info(gettextutils._LI(
+        LOG.info(_LI(
             "Loading certificate {0} from the local filesystem."
         ).format(cert_ref))
 
@@ -105,7 +105,7 @@ class LocalCertManager(cert_mgr.CertManager):
             with open(filename_certificate, 'r') as cert_file:
                 cert_data['certificate'] = cert_file.read()
         except IOError:
-            LOG.error(gettextutils._LE(
+            LOG.error(_LE(
                 "Failed to read certificate for {0}."
             ).format(cert_ref))
             raise exceptions.CertificateStorageException(
@@ -115,7 +115,7 @@ class LocalCertManager(cert_mgr.CertManager):
             with open(filename_private_key, 'r') as key_file:
                 cert_data['private_key'] = key_file.read()
         except IOError:
-            LOG.error(gettextutils._LE(
+            LOG.error(_LE(
                 "Failed to read private key for {0}."
             ).format(cert_ref))
             raise exceptions.CertificateStorageException(
@@ -144,7 +144,7 @@ class LocalCertManager(cert_mgr.CertManager):
 
         :raises CertificateStorageException: if certificate deletion fails
         """
-        LOG.info(gettextutils._LI(
+        LOG.info(_LI(
             "Deleting certificate {0} from the local filesystem."
         ).format(cert_ref))
 
@@ -161,7 +161,7 @@ class LocalCertManager(cert_mgr.CertManager):
             os.remove(filename_intermediates)
             os.remove(filename_pkp)
         except IOError as ioe:
-            LOG.error(gettextutils._LE(
+            LOG.error(_LE(
                 "Failed to delete certificate {0}."
             ).format(cert_ref))
             raise exceptions.CertificateStorageException(message=ioe.message)

@@ -23,10 +23,10 @@ from oslo import messaging
 # from paste import deploy
 
 from octavia.common import utils
+from octavia.i18n import _LI
 from octavia.openstack.common import log as logging
 from octavia import version
 
 
 LOG = logging.getLogger(__name__)
 
 core_opts = [
@@ -113,7 +113,7 @@ def setup_logging(conf):
     """
     product_name = "octavia"
     logging.setup(product_name)
-    LOG.info(_("Logging enabled!"))
+    LOG.info(_LI("Logging enabled!"))
 
 
 # def load_paste_app(app_name):

@@ -17,7 +17,7 @@
 Octavia base exception handling.
 """
 
-from octavia.openstack.common import excutils
+from oslo.utils import excutils
 
 
 class OctaviaException(Exception):

@@ -26,7 +26,8 @@ import socket
 # from eventlet.green import subprocess
 # from oslo.config import cfg
 
-from octavia.openstack.common import excutils
+from oslo.utils import excutils
+
 from octavia.openstack.common import log as logging
 
 LOG = logging.getLogger(__name__)

octavia/hacking/__init__.py (new, empty file)

octavia/hacking/checks.py (new file)
@@ -0,0 +1,128 @@
+# Copyright (c) 2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import re
+
+import pep8
+
+"""
+Guidelines for writing new hacking checks
+
+ - Use only for Octavia specific tests. OpenStack general tests
+   should be submitted to the common 'hacking' module.
+ - Pick numbers in the range O3xx. Find the current test with
+   the highest allocated number and then pick the next value.
+ - Keep the test method code in the source file ordered based
+   on the O3xx value.
+ - List the new rule in the top level HACKING.rst file
+ - Add test cases for each new rule to
+   octavia/tests/unit/test_hacking.py
+
+"""
+
+log_translation = re.compile(
+    r"(.)*LOG\.(audit|error|info|warn|warning|critical|exception)\(\s*('|\")")
+author_tag_re = (re.compile("^\s*#\s*@?(a|A)uthor"),
+                 re.compile("^\.\.\s+moduleauthor::"))
+_all_hints = set(['_', '_LI', '_LE', '_LW', '_LC'])
+_all_log_levels = {
+    # NOTE(yamamoto): Following nova which uses _() for audit.
+    'audit': '_',
+    'error': '_LE',
+    'info': '_LI',
+    'warn': '_LW',
+    'warning': '_LW',
+    'critical': '_LC',
+    'exception': '_LE',
+}
+log_translation_hints = []
+for level, hint in _all_log_levels.iteritems():
+    r = "(.)*LOG\.%(level)s\(\s*((%(wrong_hints)s)\(|'|\")" % {
+        'level': level,
+        'wrong_hints': '|'.join(_all_hints - set([hint])),
+    }
+    log_translation_hints.append(re.compile(r))
+
+
+def _directory_to_check_translation(filename):
+    return True
+
+
+def validate_log_translations(logical_line, physical_line, filename):
+    # Translations are not required in the test directory
+    if "octavia/tests" in filename:
+        return
+    if pep8.noqa(physical_line):
+        return
+    msg = "O320: Log messages require translations!"
+    if log_translation.match(logical_line):
+        yield (0, msg)
+
+    if _directory_to_check_translation(filename):
+        msg = "O320: Log messages require translation hints!"
+        for log_translation_hint in log_translation_hints:
+            if log_translation_hint.match(logical_line):
+                yield (0, msg)
+
+
+def use_jsonutils(logical_line, filename):
+    msg = "O321: jsonutils.%(fun)s must be used instead of json.%(fun)s"
+
+    # Some files in the tree are not meant to be run from inside Octavia
+    # itself, so we should not complain about them not using jsonutils
+    json_check_skipped_patterns = [
+    ]
+
+    for pattern in json_check_skipped_patterns:
+        if pattern in filename:
+            return
+
+    if "json." in logical_line:
+        json_funcs = ['dumps(', 'dump(', 'loads(', 'load(']
+        for f in json_funcs:
+            pos = logical_line.find('json.%s' % f)
+            if pos != -1:
+                yield (pos, msg % {'fun': f[:-1]})
+
+
+def no_author_tags(physical_line):
+    for regex in author_tag_re:
+        if regex.match(physical_line):
+            physical_line = physical_line.lower()
+            pos = physical_line.find('moduleauthor')
+            if pos < 0:
+                pos = physical_line.find('author')
+            return pos, "O322: Don't use author tags"
+
+
+def no_translate_debug_logs(logical_line, filename):
+    """Check for 'LOG.debug(_('
+
+    As per our translation policy,
+    https://wiki.openstack.org/wiki/LoggingStandards#Log_Translation
+    we shouldn't translate debug level logs.
+
+    * This check assumes that 'LOG' is a logger.
+    O319
+    """
+    if _directory_to_check_translation(filename):
+        if logical_line.startswith("LOG.debug(_("):
+            yield(0, "O319 Don't translate debug level logs")
+
+
+def factory(register):
+    register(validate_log_translations)
+    register(use_jsonutils)
+    register(no_author_tags)
+    register(no_translate_debug_logs)
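
For illustration only, a minimal sketch of how the O320 check above behaves; the regex is copied from checks.py, while the two sample lines are hypothetical:

    import re

    # Same pattern as log_translation in octavia/hacking/checks.py.
    log_translation = re.compile(
        r"(.)*LOG\.(audit|error|info|warn|warning|critical|exception)\(\s*('|\")")

    flagged = 'LOG.error("something broke")'         # bare string -> O320 warning
    accepted = 'LOG.error(_LE("something broke"))'   # translation hint -> passes

    assert log_translation.match(flagged)
    assert not log_translation.match(accepted)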

octavia/i18n.py (new file)
@@ -0,0 +1,30 @@
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo import i18n
+
+_translators = i18n.TranslatorFactory(domain='octavia')
+
+# The primary translation function using the well-known name "_"
+_ = _translators.primary
+
+# Translators for log levels.
+#
+# The abbreviated names are meant to reflect the usual use of a short
+# name like '_'. The "L" is for "log" and the other letter comes from
+# the level.
+_LI = _translators.log_info
+_LW = _translators.log_warning
+_LE = _translators.log_error
+_LC = _translators.log_critical
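
A hedged usage sketch of the new markers (the report() helper below is hypothetical; only the octavia.i18n and logging imports come from this commit):

    from octavia.i18n import _, _LI
    from octavia.openstack.common import log as logging

    LOG = logging.getLogger(__name__)


    def report(name):
        # _LI marks the message for the info-level log catalog; plain _ is
        # kept for user-facing strings such as exception messages.
        LOG.info(_LI("Processing %s"), name)
        if not name:
            raise ValueError(_("A name is required"))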

@@ -1,17 +0,0 @@
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import six
-
-
-six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox'))

octavia/openstack/common/_i18n.py (new file)
@@ -0,0 +1,45 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""oslo.i18n integration module.
+
+See http://docs.openstack.org/developer/oslo.i18n/usage.html
+
+"""
+
+try:
+    import oslo.i18n
+
+    # NOTE(dhellmann): This reference to o-s-l-o will be replaced by the
+    # application name when this module is synced into the separate
+    # repository. It is OK to have more than one translation function
+    # using the same domain, since there will still only be one message
+    # catalog.
+    _translators = oslo.i18n.TranslatorFactory(domain='octavia')
+
+    # The primary translation function using the well-known name "_"
+    _ = _translators.primary
+
+    # Translators for log levels.
+    #
+    # The abbreviated names are meant to reflect the usual use of a short
+    # name like '_'. The "L" is for "log" and the other letter comes from
+    # the level.
+    _LI = _translators.log_info
+    _LW = _translators.log_warning
+    _LE = _translators.log_error
+    _LC = _translators.log_critical
+except ImportError:
+    # NOTE(dims): Support for cases where a project wants to use
+    # code from octavia-incubator, but is not ready to be internationalized
+    # (like tempest)
+    _ = _LI = _LW = _LE = _LC = lambda x: x

octavia/openstack/common/cache/cache.py
@@ -20,11 +20,25 @@ Supported configuration options:
 `key_namespace`: Namespace under which keys will be created.
 """
 
+########################################################################
+#
+# THIS MODULE IS DEPRECATED
+#
+# Please refer to
+# https://etherpad.openstack.org/p/kilo-octavia-library-proposals for
+# the discussion leading to this deprecation.
+#
+# We recommend helping with the new octavia.cache library being created
+# as a wrapper for dogpile.
+#
+########################################################################
+
+
 from six.moves.urllib import parse
 from stevedore import driver
 
 
-def _get_olso_configs():
+def _get_oslo_configs():
     """Returns the oslo.config options to register."""
     # NOTE(flaper87): Oslo config should be
     # optional. Instead of doing try / except
@@ -45,7 +59,7 @@ def register_oslo_configs(conf):
     :params conf: Config object.
     :type conf: `cfg.ConfigOptions`
     """
-    conf.register_opts(_get_olso_configs())
+    conf.register_opts(_get_oslo_configs())
 
 
 def get_cache(url='memory://'):
@@ -71,8 +85,7 @@ def get_cache(url='memory://'):
     parameters = parse.parse_qsl(query)
     kwargs = {'options': dict(parameters)}
 
-    mgr = driver.DriverManager('octavia.openstack.common.cache.backends',
-                               backend,
+    mgr = driver.DriverManager('octavia.openstack.common.cache.backends', backend,
                                invoke_on_load=True,
                                invoke_args=[parsed],
                                invoke_kwds=kwargs)
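
Aside, for illustration (not part of the commit): get_cache() above hands the query portion of the cache URL to the selected backend as an options dict. A small sketch of just that parsing step, using an arbitrary example query string; whether a backend honors a given option is up to the backend:

    from six.moves.urllib import parse

    # Mirrors get_cache(): query parameters from the cache URL become the
    # backend's options dict (example URL: memory://?default_ttl=600).
    query = 'default_ttl=600'
    kwargs = {'options': dict(parse.parse_qsl(query))}
    assert kwargs == {'options': {'default_ttl': '600'}}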

@@ -117,10 +117,6 @@ def get_context_from_function_and_args(function, args, kwargs):
 
 def is_user_context(context):
     """Indicates if the request context is a normal user."""
-    if not context:
+    if not context or context.is_admin:
         return False
-    if context.is_admin:
-        return False
-    if not context.user_id or not context.project_id:
-        return False
-    return True
+    return context.user_id and context.project_id

@@ -16,6 +16,7 @@
 
 from __future__ import print_function
 
+import copy
 import errno
 import gc
 import os
@@ -29,7 +30,7 @@ import eventlet.backdoor
 import greenlet
 from oslo.config import cfg
 
-from octavia.openstack.common.gettextutils import _LI
+from octavia.openstack.common._i18n import _LI
 from octavia.openstack.common import log as logging
 
 help_for_backdoor_port = (
@@ -49,6 +50,12 @@ CONF.register_opts(eventlet_backdoor_opts)
 LOG = logging.getLogger(__name__)
 
 
+def list_opts():
+    """Entry point for oslo.config-generator.
+    """
+    return [(None, copy.deepcopy(eventlet_backdoor_opts))]
+
+
 class EventletBackdoorConfigValueError(Exception):
     def __init__(self, port_range, help_msg, ex):
         msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. '

@@ -1,113 +0,0 @@
-# Copyright 2011 OpenStack Foundation.
-# Copyright 2012, Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Exception related utilities.
-"""
-
-import logging
-import sys
-import time
-import traceback
-
-import six
-
-from octavia.openstack.common.gettextutils import _LE
-
-
-class save_and_reraise_exception(object):
-    """Save current exception, run some code and then re-raise.
-
-    In some cases the exception context can be cleared, resulting in None
-    being attempted to be re-raised after an exception handler is run. This
-    can happen when eventlet switches greenthreads or when running an
-    exception handler, code raises and catches an exception. In both
-    cases the exception context will be cleared.
-
-    To work around this, we save the exception state, run handler code, and
-    then re-raise the original exception. If another exception occurs, the
-    saved exception is logged and the new exception is re-raised.
-
-    In some cases the caller may not want to re-raise the exception, and
-    for those circumstances this context provides a reraise flag that
-    can be used to suppress the exception. For example::
-
-      except Exception:
-          with save_and_reraise_exception() as ctxt:
-              decide_if_need_reraise()
-              if not should_be_reraised:
-                  ctxt.reraise = False
-
-    If another exception occurs and reraise flag is False,
-    the saved exception will not be logged.
-
-    If the caller wants to raise new exception during exception handling
-    he/she sets reraise to False initially with an ability to set it back to
-    True if needed::
-
-      except Exception:
-          with save_and_reraise_exception(reraise=False) as ctxt:
-              [if statements to determine whether to raise a new exception]
-              # Not raising a new exception, so reraise
-              ctxt.reraise = True
-    """
-    def __init__(self, reraise=True):
-        self.reraise = reraise
-
-    def __enter__(self):
-        self.type_, self.value, self.tb, = sys.exc_info()
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        if exc_type is not None:
-            if self.reraise:
-                logging.error(_LE('Original exception being dropped: %s'),
-                              traceback.format_exception(self.type_,
-                                                         self.value,
-                                                         self.tb))
-            return False
-        if self.reraise:
-            six.reraise(self.type_, self.value, self.tb)
-
-
-def forever_retry_uncaught_exceptions(infunc):
-    def inner_func(*args, **kwargs):
-        last_log_time = 0
-        last_exc_message = None
-        exc_count = 0
-        while True:
-            try:
-                return infunc(*args, **kwargs)
-            except Exception as exc:
-                this_exc_message = six.u(str(exc))
-                if this_exc_message == last_exc_message:
-                    exc_count += 1
-                else:
-                    exc_count = 1
-                # Do not log any more frequently than once a minute unless
-                # the exception message changes
-                cur_time = int(time.time())
-                if (cur_time - last_log_time > 60 or
-                        this_exc_message != last_exc_message):
-                    logging.exception(
-                        _LE('Unexpected exception occurred %d time(s)... '
-                            'retrying.') % exc_count)
-                    last_log_time = cur_time
-                    last_exc_message = this_exc_message
-                    exc_count = 0
-                # This should be a very rare event. In case it isn't, do
-                # a sleep.
-                time.sleep(1)
-    return inner_func
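
With the incubator copy of excutils gone, callers switch to the identically named helper in oslo.utils, matching the import changes earlier in this commit. A minimal, hedged sketch of the replacement usage (resource and its methods are placeholders):

    from oslo.utils import excutils


    def activate(resource):
        try:
            resource.activate()
        except Exception:
            with excutils.save_and_reraise_exception():
                # Runs cleanup and then re-raises the original exception.
                resource.cleanup()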

@@ -15,11 +15,11 @@
 
 import contextlib
 import errno
+import logging
 import os
 import tempfile
 
-from octavia.openstack.common import excutils
-from octavia.openstack.common import log as logging
+from oslo.utils import excutils
 
 LOG = logging.getLogger(__name__)
 

@@ -0,0 +1,17 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import six
+
+
+six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox'))
@@ -1,479 +0,0 @@
|
|||||||
# Copyright 2012 Red Hat, Inc.
|
|
||||||
# Copyright 2013 IBM Corp.
|
|
||||||
# All Rights Reserved.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
# not use this file except in compliance with the License. You may obtain
|
|
||||||
# a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
||||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
||||||
# License for the specific language governing permissions and limitations
|
|
||||||
# under the License.
|
|
||||||
|
|
||||||
"""
|
|
||||||
gettext for openstack-common modules.
|
|
||||||
|
|
||||||
Usual usage in an openstack.common module:
|
|
||||||
|
|
||||||
from octavia.openstack.common.gettextutils import _
|
|
||||||
"""
|
|
||||||
|
|
||||||
import copy
|
|
||||||
import gettext
|
|
||||||
import locale
|
|
||||||
from logging import handlers
|
|
||||||
import os
|
|
||||||
|
|
||||||
from babel import localedata
|
|
||||||
import six
|
|
||||||
|
|
||||||
_AVAILABLE_LANGUAGES = {}
|
|
||||||
|
|
||||||
# FIXME(dhellmann): Remove this when moving to oslo.i18n.
|
|
||||||
USE_LAZY = False
|
|
||||||
|
|
||||||
|
|
||||||
class TranslatorFactory(object):
|
|
||||||
"""Create translator functions
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, domain, localedir=None):
|
|
||||||
"""Establish a set of translation functions for the domain.
|
|
||||||
|
|
||||||
:param domain: Name of translation domain,
|
|
||||||
specifying a message catalog.
|
|
||||||
:type domain: str
|
|
||||||
:param lazy: Delays translation until a message is emitted.
|
|
||||||
Defaults to False.
|
|
||||||
:type lazy: Boolean
|
|
||||||
:param localedir: Directory with translation catalogs.
|
|
||||||
:type localedir: str
|
|
||||||
"""
|
|
||||||
self.domain = domain
|
|
||||||
if localedir is None:
|
|
||||||
localedir = os.environ.get(domain.upper() + '_LOCALEDIR')
|
|
||||||
self.localedir = localedir
|
|
||||||
|
|
||||||
def _make_translation_func(self, domain=None):
|
|
||||||
"""Return a new translation function ready for use.
|
|
||||||
|
|
||||||
Takes into account whether or not lazy translation is being
|
|
||||||
done.
|
|
||||||
|
|
||||||
The domain can be specified to override the default from the
|
|
||||||
factory, but the localedir from the factory is always used
|
|
||||||
because we assume the log-level translation catalogs are
|
|
||||||
installed in the same directory as the main application
|
|
||||||
catalog.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if domain is None:
|
|
||||||
domain = self.domain
|
|
||||||
t = gettext.translation(domain,
|
|
||||||
localedir=self.localedir,
|
|
||||||
fallback=True)
|
|
||||||
# Use the appropriate method of the translation object based
|
|
||||||
# on the python version.
|
|
||||||
m = t.gettext if six.PY3 else t.ugettext
|
|
||||||
|
|
||||||
def f(msg):
|
|
||||||
"""oslo.i18n.gettextutils translation function."""
|
|
||||||
if USE_LAZY:
|
|
||||||
return Message(msg, domain=domain)
|
|
||||||
return m(msg)
|
|
||||||
return f
|
|
||||||
|
|
||||||
@property
|
|
||||||
def primary(self):
|
|
||||||
"The default translation function."
|
|
||||||
return self._make_translation_func()
|
|
||||||
|
|
||||||
def _make_log_translation_func(self, level):
|
|
||||||
return self._make_translation_func(self.domain + '-log-' + level)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def log_info(self):
|
|
||||||
"Translate info-level log messages."
|
|
||||||
return self._make_log_translation_func('info')
|
|
||||||
|
|
||||||
@property
|
|
||||||
def log_warning(self):
|
|
||||||
"Translate warning-level log messages."
|
|
||||||
return self._make_log_translation_func('warning')
|
|
||||||
|
|
||||||
@property
|
|
||||||
def log_error(self):
|
|
||||||
"Translate error-level log messages."
|
|
||||||
return self._make_log_translation_func('error')
|
|
||||||
|
|
||||||
@property
|
|
||||||
def log_critical(self):
|
|
||||||
"Translate critical-level log messages."
|
|
||||||
return self._make_log_translation_func('critical')
|
|
||||||
|
|
||||||
|
|
||||||
# NOTE(dhellmann): When this module moves out of the incubator into
|
|
||||||
# oslo.i18n, these global variables can be moved to an integration
|
|
||||||
# module within each application.
|
|
||||||
|
|
||||||
# Create the global translation functions.
|
|
||||||
_translators = TranslatorFactory('octavia')
|
|
||||||
|
|
||||||
# The primary translation function using the well-known name "_"
|
|
||||||
_ = _translators.primary
|
|
||||||
|
|
||||||
# Translators for log levels.
|
|
||||||
#
|
|
||||||
# The abbreviated names are meant to reflect the usual use of a short
|
|
||||||
# name like '_'. The "L" is for "log" and the other letter comes from
|
|
||||||
# the level.
|
|
||||||
_LI = _translators.log_info
|
|
||||||
_LW = _translators.log_warning
|
|
||||||
_LE = _translators.log_error
|
|
||||||
_LC = _translators.log_critical
|
|
||||||
|
|
||||||
# NOTE(dhellmann): End of globals that will move to the application's
|
|
||||||
# integration module.
|
|
||||||
|
|
||||||
|
|
||||||
def enable_lazy():
|
|
||||||
"""Convenience function for configuring _() to use lazy gettext
|
|
||||||
|
|
||||||
Call this at the start of execution to enable the gettextutils._
|
|
||||||
function to use lazy gettext functionality. This is useful if
|
|
||||||
your project is importing _ directly instead of using the
|
|
||||||
gettextutils.install() way of importing the _ function.
|
|
||||||
"""
|
|
||||||
global USE_LAZY
|
|
||||||
USE_LAZY = True
|
|
||||||
|
|
||||||
|
|
||||||
def install(domain):
|
|
||||||
"""Install a _() function using the given translation domain.
|
|
||||||
|
|
||||||
Given a translation domain, install a _() function using gettext's
|
|
||||||
install() function.
|
|
||||||
|
|
||||||
The main difference from gettext.install() is that we allow
|
|
||||||
overriding the default localedir (e.g. /usr/share/locale) using
|
|
||||||
a translation-domain-specific environment variable (e.g.
|
|
||||||
NOVA_LOCALEDIR).
|
|
||||||
|
|
||||||
Note that to enable lazy translation, enable_lazy must be
|
|
||||||
called.
|
|
||||||
|
|
||||||
:param domain: the translation domain
|
|
||||||
"""
|
|
||||||
from six import moves
|
|
||||||
tf = TranslatorFactory(domain)
|
|
||||||
moves.builtins.__dict__['_'] = tf.primary
|
|
||||||
|
|
||||||
|
|
||||||
class Message(six.text_type):
|
|
||||||
"""A Message object is a unicode object that can be translated.
|
|
||||||
|
|
||||||
Translation of Message is done explicitly using the translate() method.
|
|
||||||
For all non-translation intents and purposes, a Message is simply unicode,
|
|
||||||
and can be treated as such.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __new__(cls, msgid, msgtext=None, params=None,
|
|
||||||
domain='octavia', *args):
|
|
||||||
"""Create a new Message object.
|
|
||||||
|
|
||||||
In order for translation to work gettext requires a message ID, this
|
|
||||||
msgid will be used as the base unicode text. It is also possible
|
|
||||||
for the msgid and the base unicode text to be different by passing
|
|
||||||
the msgtext parameter.
|
|
||||||
"""
|
|
||||||
# If the base msgtext is not given, we use the default translation
|
|
||||||
# of the msgid (which is in English) just in case the system locale is
|
|
||||||
# not English, so that the base text will be in that locale by default.
|
|
||||||
if not msgtext:
|
|
||||||
msgtext = Message._translate_msgid(msgid, domain)
|
|
||||||
# We want to initialize the parent unicode with the actual object that
|
|
||||||
# would have been plain unicode if 'Message' was not enabled.
|
|
||||||
msg = super(Message, cls).__new__(cls, msgtext)
|
|
||||||
msg.msgid = msgid
|
|
||||||
msg.domain = domain
|
|
||||||
msg.params = params
|
|
||||||
return msg
|
|
||||||
|
|
||||||
def translate(self, desired_locale=None):
|
|
||||||
"""Translate this message to the desired locale.
|
|
||||||
|
|
||||||
:param desired_locale: The desired locale to translate the message to,
|
|
||||||
if no locale is provided the message will be
|
|
||||||
translated to the system's default locale.
|
|
||||||
|
|
||||||
:returns: the translated message in unicode
|
|
||||||
"""
|
|
||||||
|
|
||||||
translated_message = Message._translate_msgid(self.msgid,
|
|
||||||
self.domain,
|
|
||||||
desired_locale)
|
|
||||||
if self.params is None:
|
|
||||||
# No need for more translation
|
|
||||||
return translated_message
|
|
||||||
|
|
||||||
# This Message object may have been formatted with one or more
|
|
||||||
# Message objects as substitution arguments, given either as a single
|
|
||||||
# argument, part of a tuple, or as one or more values in a dictionary.
|
|
||||||
# When translating this Message we need to translate those Messages too
|
|
||||||
translated_params = _translate_args(self.params, desired_locale)
|
|
||||||
|
|
||||||
translated_message = translated_message % translated_params
|
|
||||||
|
|
||||||
return translated_message
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _translate_msgid(msgid, domain, desired_locale=None):
|
|
||||||
if not desired_locale:
|
|
||||||
system_locale = locale.getdefaultlocale()
|
|
||||||
# If the system locale is not available to the runtime use English
|
|
||||||
if not system_locale[0]:
|
|
||||||
desired_locale = 'en_US'
|
|
||||||
else:
|
|
||||||
desired_locale = system_locale[0]
|
|
||||||
|
|
||||||
locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR')
|
|
||||||
lang = gettext.translation(domain,
|
|
||||||
localedir=locale_dir,
|
|
||||||
languages=[desired_locale],
|
|
||||||
fallback=True)
|
|
||||||
if six.PY3:
|
|
||||||
translator = lang.gettext
|
|
||||||
else:
|
|
||||||
translator = lang.ugettext
|
|
||||||
|
|
||||||
translated_message = translator(msgid)
|
|
||||||
return translated_message
|
|
||||||
|
|
||||||
def __mod__(self, other):
|
|
||||||
# When we mod a Message we want the actual operation to be performed
|
|
||||||
# by the parent class (i.e. unicode()), the only thing we do here is
|
|
||||||
# save the original msgid and the parameters in case of a translation
|
|
||||||
params = self._sanitize_mod_params(other)
|
|
||||||
unicode_mod = super(Message, self).__mod__(params)
|
|
||||||
modded = Message(self.msgid,
|
|
||||||
msgtext=unicode_mod,
|
|
||||||
params=params,
|
|
||||||
domain=self.domain)
|
|
||||||
return modded
|
|
||||||
|
|
||||||
def _sanitize_mod_params(self, other):
|
|
||||||
"""Sanitize the object being modded with this Message.
|
|
||||||
|
|
||||||
- Add support for modding 'None' so translation supports it
|
|
||||||
- Trim the modded object, which can be a large dictionary, to only
|
|
||||||
those keys that would actually be used in a translation
|
|
||||||
- Snapshot the object being modded, in case the message is
|
|
||||||
translated, it will be used as it was when the Message was created
|
|
||||||
"""
|
|
||||||
if other is None:
|
|
||||||
params = (other,)
|
|
||||||
elif isinstance(other, dict):
|
|
||||||
# Merge the dictionaries
|
|
||||||
# Copy each item in case one does not support deep copy.
|
|
||||||
params = {}
|
|
||||||
if isinstance(self.params, dict):
|
|
||||||
for key, val in self.params.items():
|
|
||||||
params[key] = self._copy_param(val)
|
|
||||||
for key, val in other.items():
|
|
||||||
params[key] = self._copy_param(val)
|
|
||||||
else:
|
|
||||||
params = self._copy_param(other)
|
|
||||||
return params
|
|
||||||
|
|
||||||
def _copy_param(self, param):
|
|
||||||
try:
|
|
||||||
return copy.deepcopy(param)
|
|
||||||
except Exception:
|
|
||||||
# Fallback to casting to unicode this will handle the
|
|
||||||
# python code-like objects that can't be deep-copied
|
|
||||||
return six.text_type(param)
|
|
||||||
|
|
||||||
def __add__(self, other):
|
|
||||||
msg = _('Message objects do not support addition.')
|
|
||||||
raise TypeError(msg)
|
|
||||||
|
|
||||||
def __radd__(self, other):
|
|
||||||
return self.__add__(other)
|
|
||||||
|
|
||||||
if six.PY2:
|
|
||||||
def __str__(self):
|
|
||||||
# NOTE(luisg): Logging in python 2.6 tries to str() log records,
|
|
||||||
# and it expects specifically a UnicodeError in order to proceed.
|
|
||||||
msg = _('Message objects do not support str() because they may '
|
|
||||||
'contain non-ascii characters. '
|
|
||||||
'Please use unicode() or translate() instead.')
|
|
||||||
raise UnicodeError(msg)
|
|
||||||
|
|
||||||
|
|
||||||
def get_available_languages(domain):
|
|
||||||
"""Lists the available languages for the given translation domain.
|
|
||||||
|
|
||||||
:param domain: the domain to get languages for
|
|
||||||
"""
|
|
||||||
if domain in _AVAILABLE_LANGUAGES:
|
|
||||||
return copy.copy(_AVAILABLE_LANGUAGES[domain])
|
|
||||||
|
|
||||||
localedir = '%s_LOCALEDIR' % domain.upper()
|
|
||||||
find = lambda x: gettext.find(domain,
|
|
||||||
localedir=os.environ.get(localedir),
|
|
||||||
languages=[x])
|
|
||||||
|
|
||||||
# NOTE(mrodden): en_US should always be available (and first in case
|
|
||||||
# order matters) since our in-line message strings are en_US
|
|
||||||
language_list = ['en_US']
|
|
||||||
# NOTE(luisg): Babel <1.0 used a function called list(), which was
|
|
||||||
# renamed to locale_identifiers() in >=1.0, the requirements master list
|
|
||||||
    # requires >=0.9.6, uncapped, so defensively work with both. We can remove
    # this check when the master list updates to >=1.0, and update all projects
    list_identifiers = (getattr(localedata, 'list', None) or
                        getattr(localedata, 'locale_identifiers'))
    locale_identifiers = list_identifiers()

    for i in locale_identifiers:
        if find(i) is not None:
            language_list.append(i)

    # NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported
    # locales (e.g. 'zh_CN', and 'zh_TW') aren't supported even though they
    # are perfectly legitimate locales:
    #     https://github.com/mitsuhiko/babel/issues/37
    # In Babel 1.3 they fixed the bug and they support these locales, but
    # they are still not explicitly "listed" by locale_identifiers().
    # That is why we add the locales here explicitly if necessary so that
    # they are listed as supported.
    aliases = {'zh': 'zh_CN',
               'zh_Hant_HK': 'zh_HK',
               'zh_Hant': 'zh_TW',
               'fil': 'tl_PH'}
    for (locale_, alias) in six.iteritems(aliases):
        if locale_ in language_list and alias not in language_list:
            language_list.append(alias)

    _AVAILABLE_LANGUAGES[domain] = language_list
    return copy.copy(language_list)


def translate(obj, desired_locale=None):
    """Gets the translated unicode representation of the given object.

    If the object is not translatable it is returned as-is.
    If the locale is None the object is translated to the system locale.

    :param obj: the object to translate
    :param desired_locale: the locale to translate the message to, if None the
                           default system locale will be used
    :returns: the translated object in unicode, or the original object if
              it could not be translated
    """
    message = obj
    if not isinstance(message, Message):
        # If the object to translate is not already translatable,
        # let's first get its unicode representation
        message = six.text_type(obj)
    if isinstance(message, Message):
        # Even after unicoding() we still need to check if we are
        # running with translatable unicode before translating
        return message.translate(desired_locale)
    return obj


def _translate_args(args, desired_locale=None):
    """Translates all the translatable elements of the given arguments object.

    This method is used for translating the translatable values in method
    arguments which include values of tuples or dictionaries.
    If the object is not a tuple or a dictionary the object itself is
    translated if it is translatable.

    If the locale is None the object is translated to the system locale.

    :param args: the args to translate
    :param desired_locale: the locale to translate the args to, if None the
                           default system locale will be used
    :returns: a new args object with the translated contents of the original
    """
    if isinstance(args, tuple):
        return tuple(translate(v, desired_locale) for v in args)
    if isinstance(args, dict):
        translated_dict = {}
        for (k, v) in six.iteritems(args):
            translated_v = translate(v, desired_locale)
            translated_dict[k] = translated_v
        return translated_dict
    return translate(args, desired_locale)


class TranslationHandler(handlers.MemoryHandler):
    """Handler that translates records before logging them.

    The TranslationHandler takes a locale and a target logging.Handler object
    to forward LogRecord objects to after translating them. This handler
    depends on Message objects being logged, instead of regular strings.

    The handler can be configured declaratively in the logging.conf as follows:

        [handlers]
        keys = translatedlog, translator

        [handler_translatedlog]
        class = handlers.WatchedFileHandler
        args = ('/var/log/api-localized.log',)
        formatter = context

        [handler_translator]
        class = openstack.common.log.TranslationHandler
        target = translatedlog
        args = ('zh_CN',)

    If the specified locale is not available in the system, the handler will
    log in the default locale.
    """

    def __init__(self, locale=None, target=None):
        """Initialize a TranslationHandler

        :param locale: locale to use for translating messages
        :param target: logging.Handler object to forward
                       LogRecord objects to after translation
        """
        # NOTE(luisg): In order to allow this handler to be a wrapper for
        # other handlers, such as a FileHandler, and still be able to
        # configure it using logging.conf, this handler has to extend
        # MemoryHandler because only the MemoryHandlers' logging.conf
        # parsing is implemented such that it accepts a target handler.
        handlers.MemoryHandler.__init__(self, capacity=0, target=target)
        self.locale = locale

    def setFormatter(self, fmt):
        self.target.setFormatter(fmt)

    def emit(self, record):
        # We save the message from the original record to restore it
        # after translation, so other handlers are not affected by this
        original_msg = record.msg
        original_args = record.args

        try:
            self._translate_and_log_record(record)
        finally:
            record.msg = original_msg
            record.args = original_args

    def _translate_and_log_record(self, record):
        record.msg = translate(record.msg, self.locale)

        # In addition to translating the message, we also need to translate
        # arguments that were passed to the log method that were not part
        # of the main message e.g., log.info(_('Some message %s'), this_one))
        record.args = _translate_args(record.args, self.locale)

        self.target.emit(record)
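A minimal usage sketch of the handler shown above, assuming the incubator gettextutils module is still importable under its old path (this sync removes it in favour of the _i18n shim); it places a TranslationHandler in front of a plain StreamHandler so translatable Message objects are rendered in the requested locale before being written:

import logging

from octavia.openstack.common import gettextutils

# Handler that actually writes the (already translated) records.
target = logging.StreamHandler()
# Translate Message objects to zh_CN, then forward them to the target.
translator = gettextutils.TranslationHandler(locale='zh_CN', target=target)

LOG = logging.getLogger('demo')
LOG.addHandler(translator)
# _() returns a Message; plain strings simply pass through untranslated.
LOG.error(gettextutils._('Operation failed: %s'), 'timeout')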
@@ -1,73 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Import related utilities and helper functions.
"""

import sys
import traceback


def import_class(import_str):
    """Returns a class from a string including module and class."""
    mod_str, _sep, class_str = import_str.rpartition('.')
    __import__(mod_str)
    try:
        return getattr(sys.modules[mod_str], class_str)
    except AttributeError:
        raise ImportError('Class %s cannot be found (%s)' %
                          (class_str,
                           traceback.format_exception(*sys.exc_info())))


def import_object(import_str, *args, **kwargs):
    """Import a class and return an instance of it."""
    return import_class(import_str)(*args, **kwargs)


def import_object_ns(name_space, import_str, *args, **kwargs):
    """Tries to import object from default namespace.

    Imports a class and return an instance of it, first by trying
    to find the class in a default namespace, then failing back to
    a full path if not found in the default namespace.
    """
    import_value = "%s.%s" % (name_space, import_str)
    try:
        return import_class(import_value)(*args, **kwargs)
    except ImportError:
        return import_class(import_str)(*args, **kwargs)


def import_module(import_str):
    """Import a module."""
    __import__(import_str)
    return sys.modules[import_str]


def import_versioned_module(version, submodule=None):
    module = 'octavia.v%s' % version
    if submodule:
        module = '.'.join((module, submodule))
    return import_module(module)


def try_import(import_str, default=None):
    """Try to import a module and if it fails return default."""
    try:
        return import_module(import_str)
    except ImportError:
        return default
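The module deleted above has direct equivalents in oslo.utils, which this sync switches callers to; a small sketch, assuming oslo.utils is installed:

from oslo.utils import importutils

# Import a class by dotted path and instantiate it.
handler_cls = importutils.import_class('logging.StreamHandler')
handler = handler_cls()

# Optional dependency handling: returns the default when the import fails.
netaddr = importutils.try_import('netaddr')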
@@ -1,196 +0,0 @@
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

'''
JSON related utilities.

This module provides a few things:

1) A handy function for getting an object down to something that can be
   JSON serialized.  See to_primitive().

2) Wrappers around loads() and dumps().  The dumps() wrapper will
   automatically use to_primitive() for you if needed.

3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
   is available.
'''


import codecs
import datetime
import functools
import inspect
import itertools
import sys

is_simplejson = False
if sys.version_info < (2, 7):
    # On Python <= 2.6, json module is not C boosted, so try to use
    # simplejson module if available
    try:
        import simplejson as json
        is_simplejson = True
    except ImportError:
        import json
else:
    import json

import six
import six.moves.xmlrpc_client as xmlrpclib

from octavia.openstack.common import gettextutils
from octavia.openstack.common import importutils
from octavia.openstack.common import strutils
from octavia.openstack.common import timeutils

netaddr = importutils.try_import("netaddr")

_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
                     inspect.isfunction, inspect.isgeneratorfunction,
                     inspect.isgenerator, inspect.istraceback, inspect.isframe,
                     inspect.iscode, inspect.isbuiltin, inspect.isroutine,
                     inspect.isabstract]

_simple_types = (six.string_types + six.integer_types
                 + (type(None), bool, float))


def to_primitive(value, convert_instances=False, convert_datetime=True,
                 level=0, max_depth=3):
    """Convert a complex object into primitives.

    Handy for JSON serialization. We can optionally handle instances,
    but since this is a recursive function, we could have cyclical
    data structures.

    To handle cyclical data structures we could track the actual objects
    visited in a set, but not all objects are hashable. Instead we just
    track the depth of the object inspections and don't go too deep.

    Therefore, convert_instances=True is lossy ... be aware.

    """
    # handle obvious types first - order of basic types determined by running
    # full tests on nova project, resulting in the following counts:
    # 572754 <type 'NoneType'>
    # 460353 <type 'int'>
    # 379632 <type 'unicode'>
    # 274610 <type 'str'>
    # 199918 <type 'dict'>
    # 114200 <type 'datetime.datetime'>
    #  51817 <type 'bool'>
    #  26164 <type 'list'>
    #   6491 <type 'float'>
    #    283 <type 'tuple'>
    #     19 <type 'long'>
    if isinstance(value, _simple_types):
        return value

    if isinstance(value, datetime.datetime):
        if convert_datetime:
            return timeutils.strtime(value)
        else:
            return value

    # value of itertools.count doesn't get caught by nasty_type_tests
    # and results in infinite loop when list(value) is called.
    if type(value) == itertools.count:
        return six.text_type(value)

    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
    #              tests that raise an exception in a mocked method that
    #              has a @wrap_exception with a notifier will fail. If
    #              we up the dependency to 0.5.4 (when it is released) we
    #              can remove this workaround.
    if getattr(value, '__module__', None) == 'mox':
        return 'mock'

    if level > max_depth:
        return '?'

    # The try block may not be necessary after the class check above,
    # but just in case ...
    try:
        recursive = functools.partial(to_primitive,
                                      convert_instances=convert_instances,
                                      convert_datetime=convert_datetime,
                                      level=level,
                                      max_depth=max_depth)
        if isinstance(value, dict):
            return dict((k, recursive(v)) for k, v in six.iteritems(value))
        elif isinstance(value, (list, tuple)):
            return [recursive(lv) for lv in value]

        # It's not clear why xmlrpclib created their own DateTime type, but
        # for our purposes, make it a datetime type which is explicitly
        # handled
        if isinstance(value, xmlrpclib.DateTime):
            value = datetime.datetime(*tuple(value.timetuple())[:6])

        if convert_datetime and isinstance(value, datetime.datetime):
            return timeutils.strtime(value)
        elif isinstance(value, gettextutils.Message):
            return value.data
        elif hasattr(value, 'iteritems'):
            return recursive(dict(value.iteritems()), level=level + 1)
        elif hasattr(value, '__iter__'):
            return recursive(list(value))
        elif convert_instances and hasattr(value, '__dict__'):
            # Likely an instance of something. Watch for cycles.
            # Ignore class member vars.
            return recursive(value.__dict__, level=level + 1)
        elif netaddr and isinstance(value, netaddr.IPAddress):
            return six.text_type(value)
        else:
            if any(test(value) for test in _nasty_type_tests):
                return six.text_type(value)
            return value
    except TypeError:
        # Class objects are tricky since they may define something like
        # __iter__ defined but it isn't callable as list().
        return six.text_type(value)


def dumps(value, default=to_primitive, **kwargs):
    if is_simplejson:
        kwargs['namedtuple_as_object'] = False
    return json.dumps(value, default=default, **kwargs)


def dump(obj, fp, *args, **kwargs):
    if is_simplejson:
        kwargs['namedtuple_as_object'] = False
    return json.dump(obj, fp, *args, **kwargs)


def loads(s, encoding='utf-8', **kwargs):
    return json.loads(strutils.safe_decode(s, encoding), **kwargs)


def load(fp, encoding='utf-8', **kwargs):
    return json.load(codecs.getreader(encoding)(fp), **kwargs)


try:
    import anyjson
except ImportError:
    pass
else:
    anyjson._modules.append((__name__, 'dumps', TypeError,
                             'loads', ValueError, 'load'))
    anyjson.force_implementation(__name__)
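For reference, a short sketch of what the deleted helpers did, using the oslo.serialization library this sync points callers at instead; assumes oslo.serialization is installed:

import datetime

from oslo.serialization import jsonutils

payload = {'created_at': datetime.datetime(2014, 9, 1, 12, 0, 0),
           'tags': ('lb', 'amphora')}
# to_primitive() reduces datetimes, tuples and other iterables to
# JSON-friendly primitives; dumps() applies it automatically.
print(jsonutils.to_primitive(payload))
print(jsonutils.dumps(payload))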
@@ -29,7 +29,7 @@ import weakref
 from oslo.config import cfg

 from octavia.openstack.common import fileutils
-from octavia.openstack.common.gettextutils import _, _LE, _LI
+from octavia.openstack.common._i18n import _, _LE, _LI


 LOG = logging.getLogger(__name__)
@@ -146,58 +146,12 @@ class _FcntlLock(_FileLock):
         fcntl.lockf(self.lockfile, fcntl.LOCK_UN)


-class _PosixLock(object):
-    def __init__(self, name):
-        # Hash the name because it's not valid to have POSIX semaphore
-        # names with things like / in them.  Then use base64 to encode
-        # the digest() instead taking the hexdigest() because the
-        # result is shorter and most systems can't have shm sempahore
-        # names longer than 31 characters.
-        h = hashlib.sha1()
-        h.update(name.encode('ascii'))
-        self.name = str((b'/' + base64.urlsafe_b64encode(
-            h.digest())).decode('ascii'))
-
-    def acquire(self, timeout=None):
-        self.semaphore = posix_ipc.Semaphore(self.name,
-                                             flags=posix_ipc.O_CREAT,
-                                             initial_value=1)
-        self.semaphore.acquire(timeout)
-        return self
-
-    def __enter__(self):
-        self.acquire()
-        return self
-
-    def release(self):
-        self.semaphore.release()
-        self.semaphore.close()
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self.release()
-
-    def exists(self):
-        try:
-            semaphore = posix_ipc.Semaphore(self.name)
-        except posix_ipc.ExistentialError:
-            return False
-        else:
-            semaphore.close()
-        return True
-
-
 if os.name == 'nt':
     import msvcrt
     InterProcessLock = _WindowsLock
-    FileLock = _WindowsLock
 else:
-    import base64
     import fcntl
-    import hashlib
-
-    import posix_ipc
-    InterProcessLock = _PosixLock
-    FileLock = _FcntlLock
+    InterProcessLock = _FcntlLock

 _semaphores = weakref.WeakValueDictionary()
 _semaphores_lock = threading.Lock()
@@ -214,11 +168,7 @@ def _get_lock_path(name, lock_file_prefix, lock_path=None):
     local_lock_path = lock_path or CONF.lock_path

     if not local_lock_path:
-        # NOTE(bnemec): Create a fake lock path for posix locks so we don't
-        # unnecessarily raise the RequiredOptError below.
-        if InterProcessLock is not _PosixLock:
-            raise cfg.RequiredOptError('lock_path')
-        local_lock_path = 'posixlock:/'
+        raise cfg.RequiredOptError('lock_path')

     return os.path.join(local_lock_path, name)

@@ -229,11 +179,6 @@ def external_lock(name, lock_file_prefix=None, lock_path=None):

     lock_file_path = _get_lock_path(name, lock_file_prefix, lock_path)

-    # NOTE(bnemec): If an explicit lock_path was passed to us then it
-    # means the caller is relying on file-based locking behavior, so
-    # we can't use posix locks for those calls.
-    if lock_path:
-        return FileLock(lock_file_path)
     return InterProcessLock(lock_file_path)


@@ -254,11 +199,12 @@ def internal_lock(name):
     with _semaphores_lock:
         try:
             sem = _semaphores[name]
+            LOG.debug('Using existing semaphore "%s"', name)
         except KeyError:
             sem = threading.Semaphore()
             _semaphores[name] = sem
+            LOG.debug('Created new semaphore "%s"', name)

-    LOG.debug('Got semaphore "%(lock)s"', {'lock': name})
     return sem


@@ -280,13 +226,16 @@ def lock(name, lock_file_prefix=None, external=False, lock_path=None):
     """
     int_lock = internal_lock(name)
     with int_lock:
-        if external and not CONF.disable_process_locking:
-            ext_lock = external_lock(name, lock_file_prefix, lock_path)
-            with ext_lock:
-                yield ext_lock
-        else:
-            yield int_lock
-    LOG.debug('Released semaphore "%(lock)s"', {'lock': name})
+        LOG.debug('Acquired semaphore "%(lock)s"', {'lock': name})
+        try:
+            if external and not CONF.disable_process_locking:
+                ext_lock = external_lock(name, lock_file_prefix, lock_path)
+                with ext_lock:
+                    yield ext_lock
+            else:
+                yield int_lock
+        finally:
+            LOG.debug('Releasing semaphore "%(lock)s"', {'lock': name})


 def synchronized(name, lock_file_prefix=None, external=False, lock_path=None):
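A minimal sketch of the lockutils API touched above, assuming the module keeps its usual incubator path; the caller-facing semantics are unchanged by these hunks, only the POSIX-semaphore backend and one debug message go away. The lock name used here is purely illustrative.

from octavia.openstack.common import lockutils

@lockutils.synchronized('octavia-demo')
def critical_section():
    # Only one thread at a time runs this body; external=True (plus a
    # configured lock_path) would add an interprocess file lock as well.
    pass

# The same guard used directly as a context manager:
with lockutils.lock('octavia-demo'):
    pass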
@@ -27,6 +27,7 @@ It also allows setting of formatting information through conf.

 """

+import copy
 import inspect
 import itertools
 import logging
@@ -38,18 +39,15 @@ import sys
 import traceback

 from oslo.config import cfg
+from oslo.serialization import jsonutils
+from oslo.utils import importutils
 import six
 from six import moves

 _PY26 = sys.version_info[0:2] == (2, 6)

-from octavia.openstack.common.gettextutils import _
-from octavia.openstack.common import importutils
-from octavia.openstack.common import jsonutils
+from octavia.openstack.common._i18n import _
 from octavia.openstack.common import local
-# NOTE(flaper87): Pls, remove when graduating this module
-# from the incubator.
-from octavia.openstack.common.strutils import mask_password  # noqa


 _DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
@@ -177,6 +175,16 @@ CONF.register_cli_opts(logging_cli_opts)
 CONF.register_opts(generic_log_opts)
 CONF.register_opts(log_opts)


+def list_opts():
+    """Entry point for oslo.config-generator."""
+    return [(None, copy.deepcopy(common_cli_opts)),
+            (None, copy.deepcopy(logging_cli_opts)),
+            (None, copy.deepcopy(generic_log_opts)),
+            (None, copy.deepcopy(log_opts)),
+            ]
+
+
 # our new audit level
 # NOTE(jkoelker) Since we synthesized an audit level, make the logging
 # module aware of it so it acts like other levels.
@@ -501,14 +509,9 @@ def _setup_logging_from_conf(project, version):
         log_root.addHandler(streamlog)

     if CONF.publish_errors:
-        try:
-            handler = importutils.import_object(
-                "octavia.openstack.common.log_handler.PublishErrorsHandler",
-                logging.ERROR)
-        except ImportError:
-            handler = importutils.import_object(
-                "oslo.messaging.notify.log_handler.PublishErrorsHandler",
-                logging.ERROR)
+        handler = importutils.import_object(
+            "oslo.messaging.notify.log_handler.PublishErrorsHandler",
+            logging.ERROR)
         log_root.addHandler(handler)

     datefmt = CONF.log_date_format
@@ -549,12 +552,14 @@ def _setup_logging_from_conf(project, version):
         # TODO(bogdando) use the format provided by RFCSysLogHandler
         #   after existing syslog format deprecation in J
         if CONF.use_syslog_rfc_format:
-            syslog = RFCSysLogHandler(facility=facility)
+            syslog = RFCSysLogHandler(address='/dev/log',
+                                      facility=facility)
         else:
-            syslog = logging.handlers.SysLogHandler(facility=facility)
+            syslog = logging.handlers.SysLogHandler(address='/dev/log',
+                                                    facility=facility)
         log_root.addHandler(syslog)
     except socket.error:
-        log_root.error('Unable to add syslog handler. Verify that syslog'
+        log_root.error('Unable to add syslog handler. Verify that syslog '
                        'is running.')
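For context on the address='/dev/log' change above, a stdlib-only sketch: without an explicit address, SysLogHandler defaults to UDP ('localhost', 514), whereas the Unix domain socket is what a local syslog daemon normally listens on.

import logging
import logging.handlers

# On a typical Linux host the local syslog socket is /dev/log.
syslog = logging.handlers.SysLogHandler(
    address='/dev/log',
    facility=logging.handlers.SysLogHandler.LOG_USER)
logging.getLogger('demo').addHandler(syslog)
logging.getLogger('demo').warning('hello from octavia')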
@@ -21,7 +21,7 @@ import time
 from eventlet import event
 from eventlet import greenthread

-from octavia.openstack.common.gettextutils import _LE, _LW
+from octavia.openstack.common._i18n import _LE, _LW
 from octavia.openstack.common import log as logging

 LOG = logging.getLogger(__name__)
@@ -1,56 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Base class(es) for WSGI Middleware."""

import webob.dec


class Middleware(object):
    """Base WSGI middleware wrapper.

    These classes require an application to be initialized that will be called
    next.  By default the middleware will simply call its wrapped app, or you
    can override __call__ to customize its behavior.
    """

    @classmethod
    def factory(cls, global_conf, **local_conf):
        """Factory method for paste.deploy."""
        return cls

    def __init__(self, application):
        self.application = application

    def process_request(self, req):
        """Called on each request.

        If this returns None, the next application down the stack will be
        executed. If it returns a response then that response will be returned
        and execution will stop here.
        """
        return None

    def process_response(self, response):
        """Do whatever you'd like to the response."""
        return response

    @webob.dec.wsgify
    def __call__(self, req):
        response = self.process_request(req)
        if response:
            return response
        response = req.get_response(self.application)
        return self.process_response(response)
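A tiny sketch of how this (now removed) base class was used, assuming it is still importable and webob is installed; oslo.middleware exposes the same hooks. The middleware class and header name below are purely illustrative.

from octavia.openstack.common.middleware import base


class HeaderTagger(base.Middleware):
    """Illustrative middleware that tags every response with a header."""

    def process_response(self, response):
        response.headers['X-Demo'] = 'octavia'
        return response


def application(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'ok']

# Wrap the WSGI app; the factory() classmethod does the same for paste.
app = HeaderTagger(application)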
@@ -1,6 +1,3 @@
-# Copyright (c) 2013 NEC Corporation
-# All Rights Reserved.
-#
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain
 # a copy of the License at
@@ -13,34 +10,14 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-"""Middleware that provides high-level error handling.
-
-It catches all exceptions from subsequent applications in WSGI pipeline
-to hide internal errors from API response.
-"""
-
-import logging
-
-import webob.dec
-import webob.exc
-
-from octavia.openstack.common.gettextutils import _LE
-from octavia.openstack.common.middleware import base
+"""Compatibility shim for Kilo, while operators migrate to oslo.middleware."""
+
+from oslo.middleware import catch_errors
+
 from octavia.openstack.common import versionutils


-LOG = logging.getLogger(__name__)
-
-
-@versionutils.deprecated(as_of=versionutils.deprecated.JUNO,
-                         in_favor_of='octavia.middleware.CatchErrors')
-class CatchErrorsMiddleware(base.Middleware):
-
-    @webob.dec.wsgify
-    def __call__(self, req):
-        try:
-            response = req.get_response(self.application)
-        except Exception:
-            LOG.exception(_LE('An error occurred during '
-                              'processing the request: %s'))
-            response = webob.exc.HTTPInternalServerError()
-        return response
+@versionutils.deprecated(as_of=versionutils.deprecated.KILO,
+                         in_favor_of='oslo.middleware.CatchErrors')
+class CatchErrorsMiddleware(catch_errors.CatchErrors):
+    pass
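A hedged sketch of what the new shim amounts to, assuming oslo.middleware is installed: the octavia-named class is kept for existing pipelines but now simply behaves like oslo.middleware's CatchErrors, turning unhandled exceptions into a 500 response.

from oslo.middleware import catch_errors


def faulty_app(environ, start_response):
    raise RuntimeError('boom')

# Any exception raised by the wrapped application is logged and converted
# into an HTTP 500 instead of escaping the WSGI stack.
app = catch_errors.CatchErrors(faulty_app)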
@@ -1,31 +0,0 @@
# Copyright (c) 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Middleware that attaches a correlation id to WSGI request"""

import uuid

from octavia.openstack.common.middleware import base
from octavia.openstack.common import versionutils


@versionutils.deprecated(as_of=versionutils.deprecated.JUNO,
                         in_favor_of='octavia.middleware.CorrelationId')
class CorrelationIdMiddleware(base.Middleware):

    def process_request(self, req):
        correlation_id = (req.headers.get("X_CORRELATION_ID") or
                          str(uuid.uuid4()))
        req.headers['X_CORRELATION_ID'] = correlation_id
@@ -1,63 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Debug middleware"""

from __future__ import print_function

import sys

import six
import webob.dec

from octavia.openstack.common.middleware import base
from octavia.openstack.common import versionutils


@versionutils.deprecated(as_of=versionutils.deprecated.JUNO,
                         in_favor_of='octavia.middleware.Debug')
class Debug(base.Middleware):
    """Helper class that returns debug information.

    Can be inserted into any WSGI application chain to get information about
    the request and response.
    """

    @webob.dec.wsgify
    def __call__(self, req):
        print(("*" * 40) + " REQUEST ENVIRON")
        for key, value in req.environ.items():
            print(key, "=", value)
        print()
        resp = req.get_response(self.application)

        print(("*" * 40) + " RESPONSE HEADERS")
        for (key, value) in six.iteritems(resp.headers):
            print(key, "=", value)
        print()

        resp.app_iter = self.print_generator(resp.app_iter)

        return resp

    @staticmethod
    def print_generator(app_iter):
        """Prints the contents of a wrapper string iterator when iterated."""
        print(("*" * 40) + " BODY")
        for part in app_iter:
            sys.stdout.write(part)
            sys.stdout.flush()
            yield part
        print()
@@ -1,6 +1,3 @@
-# Copyright (c) 2013 NEC Corporation
-# All Rights Reserved.
-#
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain
 # a copy of the License at
@@ -13,16 +10,10 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-"""Middleware that ensures request ID.
-
-It ensures to assign request ID for each API request and set it to
-request environment. The request ID is also added to API response.
-"""
-
-import webob.dec
-
-from octavia.openstack.common import context
-from octavia.openstack.common.middleware import base
+"""Compatibility shim for Kilo, while operators migrate to oslo.middleware."""
+
+from oslo.middleware import request_id
+
 from octavia.openstack.common import versionutils


@@ -30,15 +21,7 @@ ENV_REQUEST_ID = 'openstack.request_id'
 HTTP_RESP_HEADER_REQUEST_ID = 'x-openstack-request-id'


-@versionutils.deprecated(as_of=versionutils.deprecated.JUNO,
-                         in_favor_of='octavia.middleware.RequestId')
-class RequestIdMiddleware(base.Middleware):
-
-    @webob.dec.wsgify
-    def __call__(self, req):
-        req_id = context.generate_request_id()
-        req.environ[ENV_REQUEST_ID] = req_id
-        response = req.get_response(self.application)
-        if HTTP_RESP_HEADER_REQUEST_ID not in response.headers:
-            response.headers.add(HTTP_RESP_HEADER_REQUEST_ID, req_id)
-        return response
+@versionutils.deprecated(as_of=versionutils.deprecated.KILO,
+                         in_favor_of='oslo.middleware.RequestId')
+class RequestIdMiddleware(request_id.RequestId):
+    pass
@@ -1,86 +0,0 @@
# Copyright (c) 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Request Body limiting middleware.

"""

from oslo.config import cfg
import webob.dec
import webob.exc

from octavia.openstack.common.gettextutils import _
from octavia.openstack.common.middleware import base
from octavia.openstack.common import versionutils


# default request size is 112k
max_req_body_size = cfg.IntOpt('max_request_body_size',
                               deprecated_name='osapi_max_request_body_size',
                               default=114688,
                               help='The maximum body size for each '
                                    ' request, in bytes.')

CONF = cfg.CONF
CONF.register_opt(max_req_body_size)


class LimitingReader(object):
    """Reader to limit the size of an incoming request."""
    def __init__(self, data, limit):
        """Initiates LimitingReader object.

        :param data: Underlying data object
        :param limit: maximum number of bytes the reader should allow
        """
        self.data = data
        self.limit = limit
        self.bytes_read = 0

    def __iter__(self):
        for chunk in self.data:
            self.bytes_read += len(chunk)
            if self.bytes_read > self.limit:
                msg = _("Request is too large.")
                raise webob.exc.HTTPRequestEntityTooLarge(explanation=msg)
            else:
                yield chunk

    def read(self, i=None):
        result = self.data.read(i)
        self.bytes_read += len(result)
        if self.bytes_read > self.limit:
            msg = _("Request is too large.")
            raise webob.exc.HTTPRequestEntityTooLarge(explanation=msg)
        return result


@versionutils.deprecated(
    as_of=versionutils.deprecated.JUNO,
    in_favor_of='octavia.middleware.RequestBodySizeLimiter')
class RequestBodySizeLimiter(base.Middleware):
    """Limit the size of incoming requests."""

    @webob.dec.wsgify
    def __call__(self, req):
        if (req.content_length is not None and
                req.content_length > CONF.max_request_body_size):
            msg = _("Request is too large.")
            raise webob.exc.HTTPRequestEntityTooLarge(explanation=msg)
        if req.content_length is None and req.is_body_readable:
            limiter = LimitingReader(req.body_file,
                                     CONF.max_request_body_size)
            req.body_file = limiter
        return self.application
@@ -1,163 +0,0 @@
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Network-related utilities and helper functions.
"""

import logging
import socket

from six.moves.urllib import parse

from octavia.openstack.common.gettextutils import _LW

LOG = logging.getLogger(__name__)


def parse_host_port(address, default_port=None):
    """Interpret a string as a host:port pair.

    An IPv6 address MUST be escaped if accompanied by a port,
    because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334
    means both [2001:db8:85a3::8a2e:370:7334] and
    [2001:db8:85a3::8a2e:370]:7334.

    >>> parse_host_port('server01:80')
    ('server01', 80)
    >>> parse_host_port('server01')
    ('server01', None)
    >>> parse_host_port('server01', default_port=1234)
    ('server01', 1234)
    >>> parse_host_port('[::1]:80')
    ('::1', 80)
    >>> parse_host_port('[::1]')
    ('::1', None)
    >>> parse_host_port('[::1]', default_port=1234)
    ('::1', 1234)
    >>> parse_host_port('2001:db8:85a3::8a2e:370:7334', default_port=1234)
    ('2001:db8:85a3::8a2e:370:7334', 1234)
    >>> parse_host_port(None)
    (None, None)
    """
    if not address:
        return (None, None)

    if address[0] == '[':
        # Escaped ipv6
        _host, _port = address[1:].split(']')
        host = _host
        if ':' in _port:
            port = _port.split(':')[1]
        else:
            port = default_port
    else:
        if address.count(':') == 1:
            host, port = address.split(':')
        else:
            # 0 means ipv4, >1 means ipv6.
            # We prohibit unescaped ipv6 addresses with port.
            host = address
            port = default_port

    return (host, None if port is None else int(port))


class ModifiedSplitResult(parse.SplitResult):
    """Split results class for urlsplit."""

    # NOTE(dims): The functions below are needed for Python 2.6.x.
    # We can remove these when we drop support for 2.6.x.
    @property
    def hostname(self):
        netloc = self.netloc.split('@', 1)[-1]
        host, port = parse_host_port(netloc)
        return host

    @property
    def port(self):
        netloc = self.netloc.split('@', 1)[-1]
        host, port = parse_host_port(netloc)
        return port


def urlsplit(url, scheme='', allow_fragments=True):
    """Parse a URL using urlparse.urlsplit(), splitting query and fragments.
    This function papers over Python issue9374 when needed.

    The parameters are the same as urlparse.urlsplit.
    """
    scheme, netloc, path, query, fragment = parse.urlsplit(
        url, scheme, allow_fragments)
    if allow_fragments and '#' in path:
        path, fragment = path.split('#', 1)
    if '?' in path:
        path, query = path.split('?', 1)
    return ModifiedSplitResult(scheme, netloc,
                               path, query, fragment)


def set_tcp_keepalive(sock, tcp_keepalive=True,
                      tcp_keepidle=None,
                      tcp_keepalive_interval=None,
                      tcp_keepalive_count=None):
    """Set values for tcp keepalive parameters

    This function configures tcp keepalive parameters if users wish to do
    so.

    :param tcp_keepalive: Boolean, turn on or off tcp_keepalive. If users are
      not sure, this should be True, and default values will be used.

    :param tcp_keepidle: time to wait before starting to send keepalive probes
    :param tcp_keepalive_interval: time between successive probes, once the
      initial wait time is over
    :param tcp_keepalive_count: number of probes to send before the connection
      is killed
    """

    # NOTE(praneshp): Despite keepalive being a tcp concept, the level is
    # still SOL_SOCKET. This is a quirk.
    if isinstance(tcp_keepalive, bool):
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, tcp_keepalive)
    else:
        raise TypeError("tcp_keepalive must be a boolean")

    if not tcp_keepalive:
        return

    # These options aren't available in the OS X version of eventlet,
    # Idle + Count * Interval effectively gives you the total timeout.
    if tcp_keepidle is not None:
        if hasattr(socket, 'TCP_KEEPIDLE'):
            sock.setsockopt(socket.IPPROTO_TCP,
                            socket.TCP_KEEPIDLE,
                            tcp_keepidle)
        else:
            LOG.warning(_LW('tcp_keepidle not available on your system'))
    if tcp_keepalive_interval is not None:
        if hasattr(socket, 'TCP_KEEPINTVL'):
            sock.setsockopt(socket.IPPROTO_TCP,
                            socket.TCP_KEEPINTVL,
                            tcp_keepalive_interval)
        else:
            LOG.warning(_LW('tcp_keepintvl not available on your system'))
    if tcp_keepalive_count is not None:
        if hasattr(socket, 'TCP_KEEPCNT'):
            sock.setsockopt(socket.IPPROTO_TCP,
                            socket.TCP_KEEPCNT,
                            tcp_keepalive_count)
        else:
            LOG.warning(_LW('tcp_keepknt not available on your system'))
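The keepalive helper above leaves the tree with this sync; for context, a stdlib-only sketch of the socket options it wrapped (the TCP_KEEP* names are Linux-specific, hence the hasattr guards):

import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Enable keepalive, then tune it: start probing after 60s idle, probe
# every 10s, and drop the connection after 5 unanswered probes.
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, True)
if hasattr(socket, 'TCP_KEEPIDLE'):
    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 60)
if hasattr(socket, 'TCP_KEEPINTVL'):
    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 10)
if hasattr(socket, 'TCP_KEEPCNT'):
    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5)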
@@ -11,13 +11,14 @@
 # License for the specific language governing permissions and limitations
 # under the License.

+import copy
 import random
 import time

 from oslo.config import cfg
 import six

-from octavia.openstack.common.gettextutils import _, _LE, _LI
+from octavia.openstack.common._i18n import _, _LE, _LI
 from octavia.openstack.common import log as logging


@@ -36,6 +37,11 @@ LOG = logging.getLogger(__name__)
 DEFAULT_INTERVAL = 60.0


+def list_opts():
+    """Entry point for oslo.config-generator."""
+    return [(None, copy.deepcopy(periodic_opts))]
+
+
 class InvalidPeriodicTaskArg(Exception):
     message = _("Unexpected argument for periodic task creation: %(arg)s.")
@@ -77,16 +77,18 @@ as it allows particular rules to be explicitly disabled.

 import abc
 import ast
+import copy
+import os
 import re

 from oslo.config import cfg
+from oslo.serialization import jsonutils
 import six
 import six.moves.urllib.parse as urlparse
 import six.moves.urllib.request as urlrequest

 from octavia.openstack.common import fileutils
-from octavia.openstack.common.gettextutils import _, _LE
-from octavia.openstack.common import jsonutils
+from octavia.openstack.common._i18n import _, _LE, _LI
 from octavia.openstack.common import log as logging


@@ -98,6 +100,14 @@ policy_opts = [
                default='default',
                help=_('Default rule. Enforced when a requested rule is not '
                       'found.')),
+    cfg.MultiStrOpt('policy_dirs',
+                    default=['policy.d'],
+                    help=_('Directories where policy configuration files are '
+                           'stored. They can be relative to any directory '
+                           'in the search path defined by the config_dir '
+                           'option, or absolute paths. The file defined by '
+                           'policy_file must exist for these directories to '
+                           'be searched.')),
 ]

 CONF = cfg.CONF
@@ -108,6 +118,11 @@ LOG = logging.getLogger(__name__)
 _checks = {}


+def list_opts():
+    """Entry point for oslo.config-generator."""
+    return [(None, copy.deepcopy(policy_opts))]
+
+
 class PolicyNotAuthorized(Exception):

     def __init__(self, rule):
@@ -184,16 +199,19 @@ class Enforcer(object):
     :param default_rule: Default rule to use, CONF.default_rule will
                          be used if none is specified.
     :param use_conf: Whether to load rules from cache or config file.
+    :param overwrite: Whether to overwrite existing rules when reload rules
+                      from config file.
     """

     def __init__(self, policy_file=None, rules=None,
-                 default_rule=None, use_conf=True):
-        self.rules = Rules(rules, default_rule)
+                 default_rule=None, use_conf=True, overwrite=True):
         self.default_rule = default_rule or CONF.policy_default_rule
+        self.rules = Rules(rules, self.default_rule)

         self.policy_path = None
         self.policy_file = policy_file or CONF.policy_file
         self.use_conf = use_conf
+        self.overwrite = overwrite

     def set_rules(self, rules, overwrite=True, use_conf=False):
         """Create a new Rules object based on the provided dict of rules.
@@ -225,7 +243,7 @@ class Enforcer(object):

         Policy file is cached and will be reloaded if modified.

-        :param force_reload: Whether to overwrite current rules.
+        :param force_reload: Whether to reload rules from config file.
         """

         if force_reload:
@@ -233,31 +251,55 @@ class Enforcer(object):

         if self.use_conf:
             if not self.policy_path:
-                self.policy_path = self._get_policy_path()
+                self.policy_path = self._get_policy_path(self.policy_file)

+            self._load_policy_file(self.policy_path, force_reload,
+                                   overwrite=self.overwrite)
+            for path in CONF.policy_dirs:
+                try:
+                    path = self._get_policy_path(path)
+                except cfg.ConfigFilesNotFoundError:
+                    LOG.info(_LI("Can not find policy directory: %s"), path)
+                    continue
+                self._walk_through_policy_directory(path,
+                                                    self._load_policy_file,
+                                                    force_reload, False)
+
+    @staticmethod
+    def _walk_through_policy_directory(path, func, *args):
+        # We do not iterate over sub-directories.
+        policy_files = next(os.walk(path))[2]
+        policy_files.sort()
+        for policy_file in [p for p in policy_files if not p.startswith('.')]:
+            func(os.path.join(path, policy_file), *args)
+
+    def _load_policy_file(self, path, force_reload, overwrite=True):
         reloaded, data = fileutils.read_cached_file(
-            self.policy_path, force_reload=force_reload)
+            path, force_reload=force_reload)
-        if reloaded or not self.rules:
+        if reloaded or not self.rules or not overwrite:
            rules = Rules.load_json(data, self.default_rule)
-            self.set_rules(rules)
+            self.set_rules(rules, overwrite=overwrite, use_conf=True)
            LOG.debug("Rules successfully reloaded")

-    def _get_policy_path(self):
-        """Locate the policy json data file.
+    def _get_policy_path(self, path):
+        """Locate the policy json data file/path.

-        :param policy_file: Custom policy file to locate.
+        :param path: It's value can be a full path or related path. When
+                     full path specified, this function just returns the full
+                     path. When related path specified, this function will
+                     search configuration directories to find one that exists.

         :returns: The policy path

-        :raises: ConfigFilesNotFoundError if the file couldn't
+        :raises: ConfigFilesNotFoundError if the file/path couldn't
                  be located.
         """
-        policy_file = CONF.find_file(self.policy_file)
+        policy_path = CONF.find_file(path)

-        if policy_file:
-            return policy_file
+        if policy_path:
+            return policy_path

-        raise cfg.ConfigFilesNotFoundError((self.policy_file,))
+        raise cfg.ConfigFilesNotFoundError((path,))

     def enforce(self, rule, target, creds, do_raise=False,
                 exc=None, *args, **kwargs):
@@ -272,7 +314,7 @@ class Enforcer(object):
         :param do_raise: Whether to raise an exception or not if check
                          fails.
         :param exc: Class of the exception to raise if the check fails.
-                    Any remaining arguments passed to check() (both
+                    Any remaining arguments passed to enforce() (both
                     positional and keyword arguments) will be passed to
                     the exception class. If not specified, PolicyNotAuthorized
                     will be used.
@@ -785,7 +827,7 @@ def _parse_text_rule(rule):
         return state.result
     except ValueError:
         # Couldn't parse the rule
-        LOG.exception(_LE("Failed to understand rule %r") % rule)
+        LOG.exception(_LE("Failed to understand rule %s") % rule)

         # Fail closed
         return FalseCheck()
@@ -856,7 +898,17 @@ class HttpCheck(Check):
         """

         url = ('http:' + self.match) % target
-        data = {'target': jsonutils.dumps(target),
+
+        # Convert instances of object() in target temporarily to
+        # empty dict to avoid circular reference detection
+        # errors in jsonutils.dumps().
+        temp_target = copy.deepcopy(target)
+        for key in target.keys():
+            element = target.get(key)
+            if type(element) is object:
+                temp_target[key] = {}
+
+        data = {'target': jsonutils.dumps(temp_target),
                 'credentials': jsonutils.dumps(creds)}
         post_data = urlparse.urlencode(data)
         f = urlrequest.urlopen(url, post_data)
@@ -876,7 +928,6 @@ class GenericCheck(Check):
             'Member':%(role.name)s
         """

-        # TODO(termie): do dict inspection via dot syntax
         try:
             match = self.match % target
         except KeyError:
@@ -889,7 +940,10 @@ class GenericCheck(Check):
             leftval = ast.literal_eval(self.kind)
         except ValueError:
             try:
-                leftval = creds[self.kind]
+                kind_parts = self.kind.split('.')
+                leftval = creds
+                for kind_part in kind_parts:
+                    leftval = leftval[kind_part]
             except KeyError:
                 return False
         return match == six.text_type(leftval)
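A hedged sketch of what the GenericCheck change above enables, assuming the incubator module keeps its usual path under octavia.openstack.common: credential keys in a rule may now be dotted paths into nested dictionaries. The 'get_amphora' rule name is purely illustrative.

from octavia.openstack.common import policy

rules = policy.Rules.load_json(
    '{"get_amphora": "project.id:%(project_id)s"}')
enforcer = policy.Enforcer(rules=rules, use_conf=False)

creds = {'project': {'id': 'abc123'}}   # nested credentials dictionary
target = {'project_id': 'abc123'}
# The dotted 'project.id' lookup only works with the new kind_parts code.
print(enforcer.enforce('get_amphora', target, creds))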
|||||||
@@ -27,10 +27,10 @@ import signal

 from eventlet.green import subprocess
 from eventlet import greenthread
+from oslo.utils import strutils
 import six

-from octavia.openstack.common.gettextutils import _
-from octavia.openstack.common import strutils
+from octavia.openstack.common._i18n import _


 LOG = logging.getLogger(__name__)
@@ -242,7 +242,8 @@ def trycmd(*args, **kwargs):

 def ssh_execute(ssh, cmd, process_input=None,
                 addl_env=None, check_exit_code=True):
-    LOG.debug('Running cmd (SSH): %s', cmd)
+    sanitized_cmd = strutils.mask_password(cmd)
+    LOG.debug('Running cmd (SSH): %s', sanitized_cmd)
     if addl_env:
         raise InvalidArgumentError(_('Environment not supported over SSH'))

@@ -256,7 +257,10 @@ def ssh_execute(ssh, cmd, process_input=None,
     # NOTE(justinsb): This seems suspicious...
     # ...other SSH clients have buffering issues with this approach
     stdout = stdout_stream.read()
+    sanitized_stdout = strutils.mask_password(stdout)
     stderr = stderr_stream.read()
+    sanitized_stderr = strutils.mask_password(stderr)

     stdin_stream.close()

     exit_status = channel.recv_exit_status()
@@ -266,11 +270,11 @@ def ssh_execute(ssh, cmd, process_input=None,
     LOG.debug('Result was %s' % exit_status)
     if check_exit_code and exit_status != 0:
         raise ProcessExecutionError(exit_code=exit_status,
-                                    stdout=stdout,
-                                    stderr=stderr,
-                                    cmd=cmd)
+                                    stdout=sanitized_stdout,
+                                    stderr=sanitized_stderr,
+                                    cmd=sanitized_cmd)

-    return (stdout, stderr)
+    return (sanitized_stdout, sanitized_stderr)


 def get_worker_count():
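Net effect of the ssh_execute changes above: the command line, its captured output, and anything baked into the raised exception now pass through mask_password before logging. A quick illustrative check against the library version being imported here (the first example mirrors the doctests in the strutils module removed further down; the second output is what the documented patterns should produce):

    from oslo.utils import strutils

    print(strutils.mask_password("'adminPass' : 'aaaaa'"))
    # "'adminPass' : '***'"
    print(strutils.mask_password('mysql -u root --password=secret'))
    # e.g. 'mysql -u root --password=***'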
@@ -38,14 +38,12 @@ from eventlet import event
 from oslo.config import cfg

 from octavia.openstack.common import eventlet_backdoor
-from octavia.openstack.common.gettextutils import _LE, _LI, _LW
-from octavia.openstack.common import importutils
+from octavia.openstack.common._i18n import _LE, _LI, _LW
 from octavia.openstack.common import log as logging
 from octavia.openstack.common import systemd
 from octavia.openstack.common import threadgroup


-rpc = importutils.try_import('octavia.openstack.common.rpc')
 CONF = cfg.CONF
 LOG = logging.getLogger(__name__)

@@ -180,12 +178,6 @@ class ServiceLauncher(Launcher):
                 status = exc.code
             finally:
                 self.stop()
-                if rpc:
-                    try:
-                        rpc.cleanup()
-                    except Exception:
-                        # We're shutting down, so it doesn't matter at this point.
-                        LOG.exception(_LE('Exception during rpc cleanup.'))

         return status, signo
@@ -12,12 +12,13 @@
 # License for the specific language governing permissions and limitations
 # under the License.

+import copy
 import os
 import ssl

 from oslo.config import cfg

-from octavia.openstack.common.gettextutils import _
+from octavia.openstack.common._i18n import _


 ssl_opts = [
@@ -32,9 +33,14 @@ ssl_opts = [
                    "the server securely."),
 ]


 CONF = cfg.CONF
-CONF.register_opts(ssl_opts, "ssl")
+config_section = 'ssl'
+CONF.register_opts(ssl_opts, config_section)
+
+
+def list_opts():
+    """Entry point for oslo.config-generator."""
+    return [(config_section, copy.deepcopy(ssl_opts))]


 def is_enabled():
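The new list_opts() hook is what oslo.config-generator calls to render sample configuration; the entry-point registration that points the generator at it (normally a setup.cfg stanza) is not part of this diff, so the wiring below is assumed. A sketch of consuming the hook directly:

    from oslo.config import cfg
    from octavia.openstack.common import sslutils

    # Each entry is a (config section, [Opt instances]) pair; the hook returns
    # a deep copy so callers cannot mutate the module's registered options.
    for section, opts in sslutils.list_opts():
        assert section == 'ssl'
        assert all(isinstance(opt, cfg.Opt) for opt in opts)
        print(section, sorted(opt.name for opt in opts))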
@@ -1,311 +0,0 @@
-# Copyright 2011 OpenStack Foundation.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-System-level utilities and helper functions.
-"""
-
-import math
-import re
-import sys
-import unicodedata
-
-import six
-
-from octavia.openstack.common.gettextutils import _
-
-
-UNIT_PREFIX_EXPONENT = {
-    'k': 1,
-    'K': 1,
-    'Ki': 1,
-    'M': 2,
-    'Mi': 2,
-    'G': 3,
-    'Gi': 3,
-    'T': 4,
-    'Ti': 4,
-}
-UNIT_SYSTEM_INFO = {
-    'IEC': (1024, re.compile(r'(^[-+]?\d*\.?\d+)([KMGT]i?)?(b|bit|B)$')),
-    'SI': (1000, re.compile(r'(^[-+]?\d*\.?\d+)([kMGT])?(b|bit|B)$')),
-}
-
-TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes')
-FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no')
-
-SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]")
-SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+")
-
-
-# NOTE(flaper87): The following globals are used by `mask_password`
-_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password']
-
-# NOTE(ldbragst): Let's build a list of regex objects using the list of
-# _SANITIZE_KEYS we already have. This way, we only have to add the new key
-# to the list of _SANITIZE_KEYS and we can generate regular expressions
-# for XML and JSON automatically.
-_SANITIZE_PATTERNS_2 = []
-_SANITIZE_PATTERNS_1 = []
-
-# NOTE(amrith): Some regular expressions have only one parameter, some
-# have two parameters. Use different lists of patterns here.
-_FORMAT_PATTERNS_1 = [r'(%(key)s\s*[=]\s*)[^\s^\'^\"]+']
-_FORMAT_PATTERNS_2 = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
-                      r'(%(key)s\s+[\"\']).*?([\"\'])',
-                      r'([-]{2}%(key)s\s+)[^\'^\"^=^\s]+([\s]*)',
-                      r'(<%(key)s>).*?(</%(key)s>)',
-                      r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])',
-                      r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])',
-                      r'([\'"].*?%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?'
-                      '[\'"]).*?([\'"])',
-                      r'(%(key)s\s*--?[A-z]+\s*)\S+(\s*)']
-
-for key in _SANITIZE_KEYS:
-    for pattern in _FORMAT_PATTERNS_2:
-        reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
-        _SANITIZE_PATTERNS_2.append(reg_ex)
-
-    for pattern in _FORMAT_PATTERNS_1:
-        reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
-        _SANITIZE_PATTERNS_1.append(reg_ex)
-
-
-def int_from_bool_as_string(subject):
-    """Interpret a string as a boolean and return either 1 or 0.
-
-    Any string value in:
-
-        ('True', 'true', 'On', 'on', '1')
-
-    is interpreted as a boolean True.
-
-    Useful for JSON-decoded stuff and config file parsing
-    """
-    return bool_from_string(subject) and 1 or 0
-
-
-def bool_from_string(subject, strict=False, default=False):
-    """Interpret a string as a boolean.
-
-    A case-insensitive match is performed such that strings matching 't',
-    'true', 'on', 'y', 'yes', or '1' are considered True and, when
-    `strict=False`, anything else returns the value specified by 'default'.
-
-    Useful for JSON-decoded stuff and config file parsing.
-
-    If `strict=True`, unrecognized values, including None, will raise a
-    ValueError which is useful when parsing values passed in from an API call.
-    Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'.
-    """
-    if not isinstance(subject, six.string_types):
-        subject = six.text_type(subject)
-
-    lowered = subject.strip().lower()
-
-    if lowered in TRUE_STRINGS:
-        return True
-    elif lowered in FALSE_STRINGS:
-        return False
-    elif strict:
-        acceptable = ', '.join(
-            "'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS))
-        msg = _("Unrecognized value '%(val)s', acceptable values are:"
-                " %(acceptable)s") % {'val': subject,
-                                      'acceptable': acceptable}
-        raise ValueError(msg)
-    else:
-        return default
-
-
-def safe_decode(text, incoming=None, errors='strict'):
-    """Decodes incoming text/bytes string using `incoming` if they're not
-    already unicode.
-
-    :param incoming: Text's current encoding
-    :param errors: Errors handling policy. See here for valid
-                   values http://docs.python.org/2/library/codecs.html
-    :returns: text or a unicode `incoming` encoded
-              representation of it.
-    :raises TypeError: If text is not an instance of str
-    """
-    if not isinstance(text, (six.string_types, six.binary_type)):
-        raise TypeError("%s can't be decoded" % type(text))
-
-    if isinstance(text, six.text_type):
-        return text
-
-    if not incoming:
-        incoming = (sys.stdin.encoding or
-                    sys.getdefaultencoding())
-
-    try:
-        return text.decode(incoming, errors)
-    except UnicodeDecodeError:
-        # Note(flaper87) If we get here, it means that
-        # sys.stdin.encoding / sys.getdefaultencoding
-        # didn't return a suitable encoding to decode
-        # text. This happens mostly when global LANG
-        # var is not set correctly and there's no
-        # default encoding. In this case, most likely
-        # python will use ASCII or ANSI encoders as
-        # default encodings but they won't be capable
-        # of decoding non-ASCII characters.
-        #
-        # Also, UTF-8 is being used since it's an ASCII
-        # extension.
-        return text.decode('utf-8', errors)
-
-
-def safe_encode(text, incoming=None,
-                encoding='utf-8', errors='strict'):
-    """Encodes incoming text/bytes string using `encoding`.
-
-    If incoming is not specified, text is expected to be encoded with
-    current python's default encoding. (`sys.getdefaultencoding`)
-
-    :param incoming: Text's current encoding
-    :param encoding: Expected encoding for text (Default UTF-8)
-    :param errors: Errors handling policy. See here for valid
-                   values http://docs.python.org/2/library/codecs.html
-    :returns: text or a bytestring `encoding` encoded
-              representation of it.
-    :raises TypeError: If text is not an instance of str
-    """
-    if not isinstance(text, (six.string_types, six.binary_type)):
-        raise TypeError("%s can't be encoded" % type(text))
-
-    if not incoming:
-        incoming = (sys.stdin.encoding or
-                    sys.getdefaultencoding())
-
-    if isinstance(text, six.text_type):
-        return text.encode(encoding, errors)
-    elif text and encoding != incoming:
-        # Decode text before encoding it with `encoding`
-        text = safe_decode(text, incoming, errors)
-        return text.encode(encoding, errors)
-    else:
-        return text
-
-
-def string_to_bytes(text, unit_system='IEC', return_int=False):
-    """Converts a string into an float representation of bytes.
-
-    The units supported for IEC ::
-
-        Kb(it), Kib(it), Mb(it), Mib(it), Gb(it), Gib(it), Tb(it), Tib(it)
-        KB, KiB, MB, MiB, GB, GiB, TB, TiB
-
-    The units supported for SI ::
-
-        kb(it), Mb(it), Gb(it), Tb(it)
-        kB, MB, GB, TB
-
-    Note that the SI unit system does not support capital letter 'K'
-
-    :param text: String input for bytes size conversion.
-    :param unit_system: Unit system for byte size conversion.
-    :param return_int: If True, returns integer representation of text
-                       in bytes. (default: decimal)
-    :returns: Numerical representation of text in bytes.
-    :raises ValueError: If text has an invalid value.
-
-    """
-    try:
-        base, reg_ex = UNIT_SYSTEM_INFO[unit_system]
-    except KeyError:
-        msg = _('Invalid unit system: "%s"') % unit_system
-        raise ValueError(msg)
-    match = reg_ex.match(text)
-    if match:
-        magnitude = float(match.group(1))
-        unit_prefix = match.group(2)
-        if match.group(3) in ['b', 'bit']:
-            magnitude /= 8
-    else:
-        msg = _('Invalid string format: %s') % text
-        raise ValueError(msg)
-    if not unit_prefix:
-        res = magnitude
-    else:
-        res = magnitude * pow(base, UNIT_PREFIX_EXPONENT[unit_prefix])
-    if return_int:
-        return int(math.ceil(res))
-    return res
-
-
-def to_slug(value, incoming=None, errors="strict"):
-    """Normalize string.
-
-    Convert to lowercase, remove non-word characters, and convert spaces
-    to hyphens.
-
-    Inspired by Django's `slugify` filter.
-
-    :param value: Text to slugify
-    :param incoming: Text's current encoding
-    :param errors: Errors handling policy. See here for valid
-                   values http://docs.python.org/2/library/codecs.html
-    :returns: slugified unicode representation of `value`
-    :raises TypeError: If text is not an instance of str
-    """
-    value = safe_decode(value, incoming, errors)
-    # NOTE(aababilov): no need to use safe_(encode|decode) here:
-    # encodings are always "ascii", error handling is always "ignore"
-    # and types are always known (first: unicode; second: str)
-    value = unicodedata.normalize("NFKD", value).encode(
-        "ascii", "ignore").decode("ascii")
-    value = SLUGIFY_STRIP_RE.sub("", value).strip().lower()
-    return SLUGIFY_HYPHENATE_RE.sub("-", value)
-
-
-def mask_password(message, secret="***"):
-    """Replace password with 'secret' in message.
-
-    :param message: The string which includes security information.
-    :param secret: value with which to replace passwords.
-    :returns: The unicode value of message with the password fields masked.
-
-    For example:
-
-    >>> mask_password("'adminPass' : 'aaaaa'")
-    "'adminPass' : '***'"
-    >>> mask_password("'admin_pass' : 'aaaaa'")
-    "'admin_pass' : '***'"
-    >>> mask_password('"password" : "aaaaa"')
-    '"password" : "***"'
-    >>> mask_password("'original_password' : 'aaaaa'")
-    "'original_password' : '***'"
-    >>> mask_password("u'original_password' : u'aaaaa'")
-    "u'original_password' : u'***'"
-    """
-    message = six.text_type(message)
-
-    # NOTE(ldbragst): Check to see if anything in message contains any key
-    # specified in _SANITIZE_KEYS, if not then just return the message since
-    # we don't have to mask any passwords.
-    if not any(key in message for key in _SANITIZE_KEYS):
-        return message
-
-    substitute = r'\g<1>' + secret + r'\g<2>'
-    for pattern in _SANITIZE_PATTERNS_2:
-        message = re.sub(pattern, substitute, message)
-
-    substitute = r'\g<1>' + secret
-    for pattern in _SANITIZE_PATTERNS_1:
-        message = re.sub(pattern, substitute, message)
-
-    return message
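The module above is dropped rather than edited because the same helpers now come from the oslo.utils library (added to requirements.txt further down, and already imported in the processutils hunk). Illustrative equivalents, assuming the behaviour documented in the removed code carried over unchanged:

    from oslo.utils import strutils

    strutils.bool_from_string('yes')                    # True
    strutils.bool_from_string('maybe', default=False)   # False (non-strict fallback)
    strutils.string_to_bytes('1KB', unit_system='IEC',
                             return_int=True)           # 1024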
@@ -96,6 +96,8 @@ class ThreadGroup(object):
                 continue
             try:
                 x.stop()
+            except eventlet.greenlet.GreenletExit:
+                pass
             except Exception as ex:
                 LOG.exception(ex)

@@ -1,210 +0,0 @@
-# Copyright 2011 OpenStack Foundation.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Time related utilities and helper functions.
-"""
-
-import calendar
-import datetime
-import time
-
-import iso8601
-import six
-
-
-# ISO 8601 extended time format with microseconds
-_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f'
-_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
-PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND
-
-
-def isotime(at=None, subsecond=False):
-    """Stringify time in ISO 8601 format."""
-    if not at:
-        at = utcnow()
-    st = at.strftime(_ISO8601_TIME_FORMAT
-                     if not subsecond
-                     else _ISO8601_TIME_FORMAT_SUBSECOND)
-    tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
-    st += ('Z' if tz == 'UTC' else tz)
-    return st
-
-
-def parse_isotime(timestr):
-    """Parse time from ISO 8601 format."""
-    try:
-        return iso8601.parse_date(timestr)
-    except iso8601.ParseError as e:
-        raise ValueError(six.text_type(e))
-    except TypeError as e:
-        raise ValueError(six.text_type(e))
-
-
-def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
-    """Returns formatted utcnow."""
-    if not at:
-        at = utcnow()
-    return at.strftime(fmt)
-
-
-def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT):
-    """Turn a formatted time back into a datetime."""
-    return datetime.datetime.strptime(timestr, fmt)
-
-
-def normalize_time(timestamp):
-    """Normalize time in arbitrary timezone to UTC naive object."""
-    offset = timestamp.utcoffset()
-    if offset is None:
-        return timestamp
-    return timestamp.replace(tzinfo=None) - offset
-
-
-def is_older_than(before, seconds):
-    """Return True if before is older than seconds."""
-    if isinstance(before, six.string_types):
-        before = parse_strtime(before).replace(tzinfo=None)
-    else:
-        before = before.replace(tzinfo=None)
-
-    return utcnow() - before > datetime.timedelta(seconds=seconds)
-
-
-def is_newer_than(after, seconds):
-    """Return True if after is newer than seconds."""
-    if isinstance(after, six.string_types):
-        after = parse_strtime(after).replace(tzinfo=None)
-    else:
-        after = after.replace(tzinfo=None)
-
-    return after - utcnow() > datetime.timedelta(seconds=seconds)
-
-
-def utcnow_ts():
-    """Timestamp version of our utcnow function."""
-    if utcnow.override_time is None:
-        # NOTE(kgriffs): This is several times faster
-        # than going through calendar.timegm(...)
-        return int(time.time())
-
-    return calendar.timegm(utcnow().timetuple())
-
-
-def utcnow():
-    """Overridable version of utils.utcnow."""
-    if utcnow.override_time:
-        try:
-            return utcnow.override_time.pop(0)
-        except AttributeError:
-            return utcnow.override_time
-    return datetime.datetime.utcnow()
-
-
-def iso8601_from_timestamp(timestamp):
-    """Returns an iso8601 formatted date from timestamp."""
-    return isotime(datetime.datetime.utcfromtimestamp(timestamp))
-
-
-utcnow.override_time = None
-
-
-def set_time_override(override_time=None):
-    """Overrides utils.utcnow.
-
-    Make it return a constant time or a list thereof, one at a time.
-
-    :param override_time: datetime instance or list thereof. If not
-                          given, defaults to the current UTC time.
-    """
-    utcnow.override_time = override_time or datetime.datetime.utcnow()
-
-
-def advance_time_delta(timedelta):
-    """Advance overridden time using a datetime.timedelta."""
-    assert utcnow.override_time is not None
-    try:
-        for dt in utcnow.override_time:
-            dt += timedelta
-    except TypeError:
-        utcnow.override_time += timedelta
-
-
-def advance_time_seconds(seconds):
-    """Advance overridden time by seconds."""
-    advance_time_delta(datetime.timedelta(0, seconds))
-
-
-def clear_time_override():
-    """Remove the overridden time."""
-    utcnow.override_time = None
-
-
-def marshall_now(now=None):
-    """Make an rpc-safe datetime with microseconds.
-
-    Note: tzinfo is stripped, but not required for relative times.
-    """
-    if not now:
-        now = utcnow()
-    return dict(day=now.day, month=now.month, year=now.year, hour=now.hour,
-                minute=now.minute, second=now.second,
-                microsecond=now.microsecond)
-
-
-def unmarshall_time(tyme):
-    """Unmarshall a datetime dict."""
-    return datetime.datetime(day=tyme['day'],
-                             month=tyme['month'],
-                             year=tyme['year'],
-                             hour=tyme['hour'],
-                             minute=tyme['minute'],
-                             second=tyme['second'],
-                             microsecond=tyme['microsecond'])
-
-
-def delta_seconds(before, after):
-    """Return the difference between two timing objects.
-
-    Compute the difference in seconds between two date, time, or
-    datetime objects (as a float, to microsecond resolution).
-    """
-    delta = after - before
-    return total_seconds(delta)
-
-
-def total_seconds(delta):
-    """Return the total seconds of datetime.timedelta object.
-
-    Compute total seconds of datetime.timedelta, datetime.timedelta
-    doesn't have method total_seconds in Python2.6, calculate it manually.
-    """
-    try:
-        return delta.total_seconds()
-    except AttributeError:
-        return ((delta.days * 24 * 3600) + delta.seconds +
-                float(delta.microseconds) / (10 ** 6))
-
-
-def is_soon(dt, window):
-    """Determines if time is going to happen in the next window seconds.
-
-    :param dt: the time
-    :param window: minimum seconds to remain to consider the time not soon
-
-    :return: True if expiration is within the given duration
-    """
-    soon = (utcnow() + datetime.timedelta(seconds=window))
-    return normalize_time(dt) <= soon
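The removed time helpers likewise live on in oslo.utils; a short illustrative round-trip, assuming the graduated library keeps the semantics documented above (output value is arbitrary):

    from oslo.utils import timeutils

    now = timeutils.utcnow()          # naive UTC "now", overridable in tests
    stamp = timeutils.isotime(now)    # e.g. '2014-10-14T17:33:09Z'
    parsed = timeutils.normalize_time(timeutils.parse_isotime(stamp))
    timeutils.delta_seconds(parsed, now) < 1.0   # True: only sub-seconds are lost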
@@ -23,7 +23,7 @@ import inspect
 import pkg_resources
 import six

-from octavia.openstack.common.gettextutils import _
+from octavia.openstack.common._i18n import _
 from octavia.openstack.common import log as logging


@@ -1,45 +0,0 @@
-# Copyright 2014, Doug Wiegley, A10 Networks.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import octavia.tests.unit.base as base
-import octavia.openstack.common.cache
-import octavia.openstack.common.context
-import octavia.openstack.common.excutils
-import octavia.openstack.common.fixture
-import octavia.openstack.common.gettextutils
-import octavia.openstack.common.importutils
-import octavia.openstack.common.jsonutils
-import octavia.openstack.common.local
-import octavia.openstack.common.lockutils
-import octavia.openstack.common.log
-import octavia.openstack.common.loopingcall
-import octavia.openstack.common.middleware
-import octavia.openstack.common.network_utils
-import octavia.openstack.common.periodic_task
-import octavia.openstack.common.policy
-import octavia.openstack.common.processutils
-import octavia.openstack.common.service
-import octavia.openstack.common.sslutils
-import octavia.openstack.common.strutils
-import octavia.openstack.common.systemd
-import octavia.openstack.common.threadgroup
-import octavia.openstack.common.timeutils
-import octavia.openstack.common.uuidutils
-import octavia.openstack.common.versionutils
-
-
-class TestCommon(base.TestCase):
-    def test_openstack_common(self):
-        # The test is the imports
-        pass
@@ -3,33 +3,21 @@
 module=cache
 module=context
 module=eventlet_backdoor
-module=excutils
 module=fileutils
 module=fixture
-module=gettextutils
-module=importutils
 module=install_venv_common
-module=jsonutils
 module=local
 module=lockutils
 module=log
 module=loopingcall
-module=middleware.base
-module=middleware.catch_errors
-module=middleware.correlation_id
-module=middleware.debug
-module=middleware.request_id
-module=middleware.sizelimit
-module=network_utils
+module=middleware
 module=periodic_task
 module=policy
 module=processutils
 module=service
 module=sslutils
-module=strutils
 module=systemd
 module=threadgroup
-module=timeutils
 module=uuidutils
 module=versionutils
@@ -23,10 +23,14 @@ netaddr>=0.7.6
 python-neutronclient>=2.3.6,<3
 WebOb>=1.2.3
 six>=1.7.0
-oslo.config>=1.4.0.0a3
-oslo.db>=0.4.0 # Apache-2.0
-oslo.messaging>=1.4.0.0a3
-oslo.rootwrap>=1.3.0.0a1
+oslo.config>=1.4.0 # Apache-2.0
+oslo.db>=1.1.0 # Apache-2.0
+oslo.i18n>=1.0.0 # Apache-2.0
+oslo.messaging>=1.4.0
+oslo.middleware>=0.1.0 # Apache-2.0
+oslo.rootwrap>=1.3.0
+oslo.serialization>=1.0.0 # Apache-2.0
+oslo.utils>=1.0.0 # Apache-2.0
 python-barbicanclient>=3.0
 python-keystoneclient>=0.11.1
 python-novaclient>=2.17.0
tox.ini
@@ -25,5 +25,9 @@ show-source = true
 builtins = _
 exclude = .venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build,tools,.ropeproject,rally-scenarios

+[hacking]
+import_exceptions = octavia.i18n
+local-check-factory = octavia.hacking.checks.factory
+
 [doc8]
 max-line-length = 79
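The new [hacking] block wires flake8 to project-local checks. octavia/hacking/checks.py itself is not part of this diff, so the sketch below is only a hypothetical illustration of the shape that local-check-factory expects: a callable that registers check functions yielding (offset, message) pairs.

    # Hypothetical example checks, not Octavia's actual ones.
    def check_no_print(logical_line):
        # hacking calls this once per logical line of source.
        if logical_line.lstrip().startswith('print('):
            yield 0, "O999: print() left in library code (example check)"


    def factory(register):
        # Referenced from tox.ini via local-check-factory.
        register(check_no_print)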