From 75905790e54336366ec6bb4ce73131b2911b0c50 Mon Sep 17 00:00:00 2001 From: Andrey Pavlov Date: Wed, 2 Sep 2015 15:56:59 +0300 Subject: [PATCH] update gce api to current OpenStack - add devstack plugin - update openstack common - move unit tests to unit folder - update infrastructural files Change-Id: Id72006f70110dbd1762f42b582470ac5f3439f2a --- .testr.conf | 5 +- devstack/README.rst | 13 + devstack/plugin.sh | 249 +++++ devstack/settings | 4 + etc/gceapi/README-ec2api.conf.txt | 4 + etc/gceapi/gceapi-config-generator.conf | 10 + etc/gceapi/gceapi.conf.sample | 284 ------ gceapi/api/__init__.py | 4 +- gceapi/api/address_neutron_api.py | 4 +- gceapi/api/address_nova_api.py | 2 +- gceapi/api/base_api.py | 4 +- gceapi/api/clients.py | 95 +- gceapi/api/common.py | 10 +- gceapi/api/discovery.py | 6 +- gceapi/api/firewall_api.py | 9 +- gceapi/api/image_api.py | 2 +- gceapi/api/instance_address_api.py | 5 +- gceapi/api/instance_api.py | 5 +- gceapi/api/instance_disk_api.py | 5 +- gceapi/api/instances.py | 4 +- gceapi/api/network_neutron_api.py | 6 +- gceapi/api/network_nova_api.py | 2 +- gceapi/api/oauth.py | 10 +- gceapi/api/operation_api.py | 5 +- gceapi/api/operation_util.py | 5 +- gceapi/api/project_api.py | 2 +- gceapi/api/projects.py | 2 +- gceapi/api/region_api.py | 2 +- gceapi/api/route_neutron_api.py | 2 +- gceapi/api/route_nova_api.py | 2 +- gceapi/api/wsgi.py | 6 +- gceapi/auth.py | 8 +- gceapi/cmd/__init__.py | 19 - gceapi/cmd/api.py | 43 +- gceapi/cmd/manage.py | 57 +- gceapi/config.py | 57 +- gceapi/context.py | 7 +- gceapi/db/api.py | 49 +- gceapi/db/migration.py | 37 +- gceapi/db/sqlalchemy/__init__.py | 13 - gceapi/db/sqlalchemy/api.py | 28 +- gceapi/db/sqlalchemy/migrate_repo/__init__.py | 13 - .../migrate_repo/versions/__init__.py | 13 - gceapi/db/sqlalchemy/migration.py | 70 +- gceapi/db/sqlalchemy/models.py | 3 +- gceapi/exception.py | 7 +- gceapi/i18n.py | 46 + gceapi/openstack/common/_i18n.py | 45 + gceapi/openstack/common/db/api.py | 57 -- gceapi/openstack/common/db/exception.py | 54 -- .../common/db/sqlalchemy/__init__.py | 0 .../common/db/sqlalchemy/migration.py | 265 ------ .../openstack/common/db/sqlalchemy/models.py | 117 --- .../common/db/sqlalchemy/provision.py | 187 ---- .../openstack/common/db/sqlalchemy/session.py | 867 ------------------ .../common/db/sqlalchemy/test_migrations.py | 269 ------ .../openstack/common/db/sqlalchemy/utils.py | 548 ----------- gceapi/openstack/common/eventlet_backdoor.py | 21 +- gceapi/openstack/common/excutils.py | 99 -- gceapi/openstack/common/gettextutils.py | 440 --------- gceapi/openstack/common/importutils.py | 66 -- gceapi/openstack/common/jsonutils.py | 182 ---- gceapi/openstack/common/log.py | 657 ------------- .../openstack/common/py3kcompat/__init__.py | 0 .../openstack/common/py3kcompat/urlutils.py | 67 -- gceapi/openstack/common/test.py | 88 -- gceapi/openstack/common/timeutils.py | 210 ----- gceapi/paths.py | 32 +- gceapi/service.py | 8 +- gceapi/tests/__init__.py | 30 - gceapi/tests/{api => unit}/__init__.py | 16 +- .../common/db => tests/unit/api}/__init__.py | 0 gceapi/tests/{ => unit}/api/common.py | 21 +- .../{ => unit}/api/fake_cinder_client.py | 4 +- gceapi/tests/{ => unit}/api/fake_db.py | 0 .../{ => unit}/api/fake_glance_client.py | 6 +- .../{ => unit}/api/fake_keystone_client.py | 4 +- .../{ => unit}/api/fake_neutron_client.py | 2 +- .../tests/{ => unit}/api/fake_nova_client.py | 6 +- gceapi/tests/{ => unit}/api/fake_request.py | 0 gceapi/tests/{ => unit}/api/test_addresses.py | 2 +- gceapi/tests/{ => 
unit}/api/test_disks.py | 2 +- gceapi/tests/{ => unit}/api/test_fields.py | 2 +- gceapi/tests/{ => unit}/api/test_firewalls.py | 2 +- gceapi/tests/{ => unit}/api/test_images.py | 2 +- gceapi/tests/{ => unit}/api/test_instances.py | 2 +- .../{ => unit}/api/test_machine_types.py | 2 +- gceapi/tests/{ => unit}/api/test_networks.py | 2 +- .../tests/{ => unit}/api/test_operations.py | 2 +- gceapi/tests/{ => unit}/api/test_projects.py | 2 +- gceapi/tests/{ => unit}/api/test_regions.py | 2 +- gceapi/tests/{ => unit}/api/test_routes.py | 2 +- gceapi/tests/{ => unit}/api/test_snapshots.py | 2 +- gceapi/tests/{ => unit}/api/test_zones.py | 2 +- gceapi/tests/{ => unit}/api/utils.py | 0 gceapi/{ => tests/unit}/test.py | 9 +- gceapi/version.py | 24 +- gceapi/wsgi.py | 48 +- gceapi/wsgi_ext.py | 16 +- install.sh | 2 +- openstack-common.conf | 2 +- requirements.txt | 35 +- run_tests.sh | 123 --- setup.cfg | 23 +- setup.py | 8 + test-requirements.txt | 7 +- {bin => tools/db}/gceapi-db-setup | 0 tools/lintstack.py | 199 ---- tools/lintstack.sh | 59 -- tox.ini | 19 +- 110 files changed, 887 insertions(+), 5321 deletions(-) create mode 100644 devstack/README.rst create mode 100755 devstack/plugin.sh create mode 100644 devstack/settings create mode 100644 etc/gceapi/README-ec2api.conf.txt create mode 100644 etc/gceapi/gceapi-config-generator.conf delete mode 100644 etc/gceapi/gceapi.conf.sample create mode 100644 gceapi/i18n.py create mode 100644 gceapi/openstack/common/_i18n.py delete mode 100644 gceapi/openstack/common/db/api.py delete mode 100644 gceapi/openstack/common/db/exception.py delete mode 100644 gceapi/openstack/common/db/sqlalchemy/__init__.py delete mode 100644 gceapi/openstack/common/db/sqlalchemy/migration.py delete mode 100644 gceapi/openstack/common/db/sqlalchemy/models.py delete mode 100644 gceapi/openstack/common/db/sqlalchemy/provision.py delete mode 100644 gceapi/openstack/common/db/sqlalchemy/session.py delete mode 100644 gceapi/openstack/common/db/sqlalchemy/test_migrations.py delete mode 100644 gceapi/openstack/common/db/sqlalchemy/utils.py delete mode 100644 gceapi/openstack/common/excutils.py delete mode 100644 gceapi/openstack/common/gettextutils.py delete mode 100644 gceapi/openstack/common/importutils.py delete mode 100644 gceapi/openstack/common/jsonutils.py delete mode 100644 gceapi/openstack/common/log.py delete mode 100644 gceapi/openstack/common/py3kcompat/__init__.py delete mode 100644 gceapi/openstack/common/py3kcompat/urlutils.py delete mode 100644 gceapi/openstack/common/test.py delete mode 100644 gceapi/openstack/common/timeutils.py rename gceapi/tests/{api => unit}/__init__.py (57%) rename gceapi/{openstack/common/db => tests/unit/api}/__init__.py (100%) rename gceapi/tests/{ => unit}/api/common.py (89%) rename gceapi/tests/{ => unit}/api/fake_cinder_client.py (99%) rename gceapi/tests/{ => unit}/api/fake_db.py (100%) rename gceapi/tests/{ => unit}/api/fake_glance_client.py (96%) rename gceapi/tests/{ => unit}/api/fake_keystone_client.py (91%) rename gceapi/tests/{ => unit}/api/fake_neutron_client.py (99%) rename gceapi/tests/{ => unit}/api/fake_nova_client.py (99%) rename gceapi/tests/{ => unit}/api/fake_request.py (100%) rename gceapi/tests/{ => unit}/api/test_addresses.py (99%) rename gceapi/tests/{ => unit}/api/test_disks.py (99%) rename gceapi/tests/{ => unit}/api/test_fields.py (98%) rename gceapi/tests/{ => unit}/api/test_firewalls.py (99%) rename gceapi/tests/{ => unit}/api/test_images.py (99%) rename gceapi/tests/{ => unit}/api/test_instances.py (99%) rename 
gceapi/tests/{ => unit}/api/test_machine_types.py (99%) rename gceapi/tests/{ => unit}/api/test_networks.py (99%) rename gceapi/tests/{ => unit}/api/test_operations.py (99%) rename gceapi/tests/{ => unit}/api/test_projects.py (98%) rename gceapi/tests/{ => unit}/api/test_regions.py (98%) rename gceapi/tests/{ => unit}/api/test_routes.py (99%) rename gceapi/tests/{ => unit}/api/test_snapshots.py (99%) rename gceapi/tests/{ => unit}/api/test_zones.py (98%) rename gceapi/tests/{ => unit}/api/utils.py (100%) rename gceapi/{ => tests/unit}/test.py (95%) delete mode 100755 run_tests.sh rename {bin => tools/db}/gceapi-db-setup (100%) delete mode 100755 tools/lintstack.py delete mode 100755 tools/lintstack.sh diff --git a/.testr.conf b/.testr.conf index 968b110..97913dd 100644 --- a/.testr.conf +++ b/.testr.conf @@ -1,4 +1,7 @@ [DEFAULT] -test_command=OS_STDOUT_CAPTURE=1 OS_STDERR_CAPTURE=1 OS_LOG_CAPTURE=1 ${PYTHON:-python} -m subunit.run discover -t ./ gceapi/tests $LISTOPT $IDOPTION +test_command=OS_STDOUT_CAPTURE=1 \ + OS_STDERR_CAPTURE=1 \ + OS_LOG_CAPTURE=1 \ + ${PYTHON:-python} -m subunit.run discover -t ./ gceapi/tests/unit $LISTOPT $IDOPTION test_id_option=--load-list $IDFILE test_list_option=--list diff --git a/devstack/README.rst b/devstack/README.rst new file mode 100644 index 0000000..291222a --- /dev/null +++ b/devstack/README.rst @@ -0,0 +1,13 @@ +====================== + Enabling in Devstack +====================== + +1. Download DevStack + +2. Add this repo as an external repository:: + + > cat local.conf + [[local|localrc]] + enable_plugin gce-api https://github.com/stackforge/gce-api + +3. run ``stack.sh`` diff --git a/devstack/plugin.sh b/devstack/plugin.sh new file mode 100755 index 0000000..b1940ef --- /dev/null +++ b/devstack/plugin.sh @@ -0,0 +1,249 @@ +# lib/gce-api + +# Dependencies: +# ``functions`` file +# ``DEST``, ``DATA_DIR``, ``STACK_USER`` must be defined + +# ``stack.sh`` calls the entry points in this order: +# +# install_gceapi +# configure_gceapi +# start_gceapi +# stop_gceapi + + +# Save trace setting +XTRACE=$(set +o | grep xtrace) +set -o xtrace + + +# Defaults +# -------- + +# Set up default directories +GCEAPI_DIR=$DEST/gce-api +GCEAPI_CONF_DIR=${GCEAPI_CONF_DIR:-/etc/gceapi} +GCEAPI_CONF_FILE=${GCEAPI_CONF_DIR}/gceapi.conf +GCEAPI_DEBUG=${GCEAPI_DEBUG:-True} +GCEAPI_STATE_PATH=${GCEAPI_STATE_PATH:=$DATA_DIR/gceapi} + +GCEAPI_SERVICE_PORT=${GCEAPI_SERVICE_PORT:-8787} + +GCEAPI_RABBIT_VHOST=${GCEAPI_RABBIT_VHOST:-''} + +GCEAPI_ADMIN_USER=${GCEAPI_ADMIN_USER:-gceapi} + +GCEAPI_KEYSTONE_SIGNING_DIR=${GCEAPI_KEYSTONE_SIGNING_DIR:-/tmp/keystone-signing-gceapi} + +# Support entry points installation of console scripts +if [[ -d $GCEAPI_DIR/bin ]]; then + GCEAPI_BIN_DIR=$GCEAPI_DIR/bin +else + GCEAPI_BIN_DIR=$(get_python_exec_prefix) +fi + + +function recreate_endpoint { + local endpoint=$1 + local description=$2 + local port=$3 + + if [[ "$KEYSTONE_CATALOG_BACKEND" = 'sql' ]]; then + + # Remove nova's gce service/endpoint + local endpoint_id=$(openstack endpoint list \ + --column "ID" \ + --column "Region" \ + --column "Service Name" \ + | grep " $REGION_NAME " \ + | grep " $endpoint " | get_field 1) + if [[ -n "$endpoint_id" ]]; then + openstack endpoint delete $endpoint_id + fi + local service_id=$(openstack service list \ + -c "ID" -c "Name" \ + | grep " $endpoint " | get_field 1) + if [[ -n "$service_id" ]]; then + openstack service delete $service_id + fi + + local service_id=$(openstack service create \ + $endpoint \ + --name "$endpoint" \ + 
--description="$description" \ + -f value -c id) + openstack endpoint create \ + $service_id \ + --region "$REGION_NAME" \ + --publicurl "$SERVICE_PROTOCOL://$SERVICE_HOST:$port/" \ + --adminurl "$SERVICE_PROTOCOL://$SERVICE_HOST:$port/" \ + --internalurl "$SERVICE_PROTOCOL://$SERVICE_HOST:$port/" + fi +} + + +# create_gceapi_accounts() - Set up common required gceapi accounts +# +# Tenant User Roles +# ------------------------------ +# service gceapi admin +function create_gceapi_accounts() { + if ! is_service_enabled key; then + return + fi + + SERVICE_TENANT=$(openstack project list | awk "/ $SERVICE_TENANT_NAME / { print \$2 }") + ADMIN_ROLE=$(openstack role list | awk "/ admin / { print \$2 }") + + GCEAPI_USER=$(openstack user create \ + $GCEAPI_ADMIN_USER \ + --password "$SERVICE_PASSWORD" \ + --project $SERVICE_TENANT \ + --email gceapi@example.com \ + | grep " id " | get_field 2) + + openstack role add \ + $ADMIN_ROLE \ + --project $SERVICE_TENANT \ + --user $GCEAPI_USER + + recreate_endpoint "gce" "GCE Compatibility Layer" $GCEAPI_SERVICE_PORT +} + + +function mkdir_chown_stack { + if [[ ! -d "$1" ]]; then + sudo mkdir -p "$1" + fi + sudo chown $STACK_USER "$1" +} + + +function configure_gceapi_rpc_backend() { + # Configure the rpc service. + iniset_rpc_backend gceapi $GCEAPI_CONF_FILE DEFAULT + + # TODO(ruhe): get rid of this ugly workaround. + inicomment $GCEAPI_CONF_FILE DEFAULT rpc_backend + + # Set non-default rabbit virtual host if required. + if [[ -n "$GCEAPI_RABBIT_VHOST" ]]; then + iniset $GCEAPI_CONF_FILE DEFAULT rabbit_virtual_host $GCEAPI_RABBIT_VHOST + fi +} + +function configure_gceapi_networking { + # Use keyword 'public' if gceapi external network was not set. + # If it was set but the network is not exist then + # first available external network will be selected. + local ext_net=${GCEAPI_EXTERNAL_NETWORK:-'public'} + # Configure networking options for gceapi + if [[ -n "$ext_net" ]]; then + iniset $GCEAPI_CONF_FILE DEFAULT public_network $ext_net + fi + + if [[ ,${ENABLED_SERVICES} =~ ,"q-" ]]; then + iniset $GCEAPI_CONF_FILE DEFAULT network_api quantum + else + iniset $GCEAPI_CONF_FILE DEFAULT network_api nova + fi +} + +# Entry points +# ------------ + +# configure_gceapi() - Set config files, create data dirs, etc +function configure_gceapi { + mkdir_chown_stack "$GCEAPI_CONF_DIR" + + # Generate gceapi configuration file and configure common parameters. + touch $GCEAPI_CONF_FILE + cp $GCEAPI_DIR/etc/gceapi/api-paste.ini $GCEAPI_CONF_DIR + + cleanup_gceapi + + iniset $GCEAPI_CONF_FILE DEFAULT debug $GCEAPI_DEBUG + iniset $GCEAPI_CONF_FILE DEFAULT use_syslog $SYSLOG + iniset $GCEAPI_CONF_FILE DEFAULT state_path $GCEAPI_STATE_PATH + + + # gceapi Api Configuration + #------------------------- + + iniset $GCEAPI_CONF_FILE DEFAULT region $REGION_NAME + + #iniset $GCEAPI_CONF_FILE DEFAULT admin_tenant_name $SERVICE_TENANT_NAME + #iniset $GCEAPI_CONF_FILE DEFAULT admin_user $GCEAPI_ADMIN_USER + #iniset $GCEAPI_CONF_FILE DEFAULT admin_password $SERVICE_PASSWORD + #iniset $GCEAPI_CONF_FILE DEFAULT keystone_url "http://${KEYSTONE_AUTH_HOST}:35357/v2.0" + + configure_gceapi_rpc_backend + + # configure the database. + iniset $GCEAPI_CONF_FILE database connection `database_connection_url gceapi` + + configure_gceapi_networking +} + + +# init_gceapi() - Initialize databases, etc. 
+function init_gceapi() { + # (re)create gceapi database + recreate_database gceapi utf8 + + $GCEAPI_BIN_DIR/gce-api-manage --config-file $GCEAPI_CONF_FILE db_sync +} + + +# install_gceapi() - Collect source and prepare +function install_gceapi() { + # TODO(ruhe): use setup_develop once gceapi requirements match with global-requirement.txt + # both functions (setup_develop and setup_package) are defined at: + # http://git.openstack.org/cgit/openstack-dev/devstack/tree/functions-common + setup_package $GCEAPI_DIR -e +} + + +# start_gceapi() - Start running processes, including screen +function start_gceapi() { + screen_it gce-api "cd $GCEAPI_DIR && $GCEAPI_BIN_DIR/gce-api --config-file $GCEAPI_CONF_DIR/gceapi.conf" +} + + +# stop_gceapi() - Stop running processes +function stop_gceapi() { + # Kill the gceapi screen windows + screen -S $SCREEN_NAME -p gce-api -X kill +} + +function cleanup_gceapi() { + + # Cleanup keystone signing dir + sudo rm -rf $GCEAPI_KEYSTONE_SIGNING_DIR +} + +# main dispatcher +if [[ "$1" == "stack" && "$2" == "install" ]]; then + echo_summary "Installing gce-api" + install_gceapi +elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then + echo_summary "Configuring gce-api" + configure_gceapi + create_gceapi_accounts +elif [[ "$1" == "stack" && "$2" == "extra" ]]; then + echo_summary "Initializing gce-api" + init_gceapi + start_gceapi +fi + +if [[ "$1" == "unstack" ]]; then + stop_gceapi + cleanup_gceapi +fi + +# Restore xtrace +$XTRACE + +# Local variables: +# mode: shell-script +# End: diff --git a/devstack/settings b/devstack/settings new file mode 100644 index 0000000..86b26c8 --- /dev/null +++ b/devstack/settings @@ -0,0 +1,4 @@ +# Devstack settings + +# we have to add gce-api to enabled services for screen_it to work +enable_service gce-api diff --git a/etc/gceapi/README-ec2api.conf.txt b/etc/gceapi/README-ec2api.conf.txt new file mode 100644 index 0000000..6fe9347 --- /dev/null +++ b/etc/gceapi/README-ec2api.conf.txt @@ -0,0 +1,4 @@ +To generate the sample gceapi.conf file, run the following +command from the top level of the gceapi directory: + +tox -egenconfig diff --git a/etc/gceapi/gceapi-config-generator.conf b/etc/gceapi/gceapi-config-generator.conf new file mode 100644 index 0000000..5c454f3 --- /dev/null +++ b/etc/gceapi/gceapi-config-generator.conf @@ -0,0 +1,10 @@ +[DEFAULT] +output_file = etc/gceapi/gceapi.conf.sample +wrap_width = 79 +namespace = gceapi +namespace = gceapi.api +namespace = oslo.log +namespace = oslo.messaging +namespace = oslo.service.service +namespace = oslo.db +namespace = oslo.concurrency diff --git a/etc/gceapi/gceapi.conf.sample b/etc/gceapi/gceapi.conf.sample deleted file mode 100644 index 125acaa..0000000 --- a/etc/gceapi/gceapi.conf.sample +++ /dev/null @@ -1,284 +0,0 @@ -[DEFAULT] - -# -# Options defined in gceapi.auth -# - -# whether to use per-user rate limiting for the api. (boolean -# value) -#api_rate_limit=false - -# The strategy to use for auth: keystone only for now. (string -# value) -#auth_strategy=keystone - -# Treat X-Forwarded-For as the canonical remote address. Only -# enable this if you have a sanitizing proxy. 
(boolean value) -#use_forwarded_for=false - - -# -# Options defined in gceapi.exception -# - -# make exception message format errors fatal (boolean value) -#fatal_exception_format_errors=false - - -# -# Options defined in gceapi.paths -# - -# Directory where the gceapi python module is installed -# (string value) -#pybasedir=/usr/lib/python/site-packages - -# Directory where gceapi binaries are installed (string value) -#bindir=$pybasedir/bin - -# Top-level directory for maintaining gceapi's state (string -# value) -#state_path=$pybasedir - - -# -# Options defined in gceapi.service -# - -# Enable ssl connections or not (boolean value) -#use_ssl=false - -# maximum time since last check-in for up service (integer -# value) -#service_down_time=60 - -# IP address for gce api to listen (string value) -#gce_listen=0.0.0.0 - -# port for gce api to listen (integer value) -#gce_listen_port=8787 - - -# -# Options defined in gceapi.wsgi -# - -# File name for the paste.deploy config for gceapi-api (string -# value) -#api_paste_config=api-paste.ini - -# A python format string that is used as the template to -# generate log lines. The following values can be formatted -# into it: client_ip, date_time, request_line, status_code, -# body_length, wall_seconds. (string value) -#wsgi_log_format=%(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f - -# CA certificate file to use to verify connecting clients -# (string value) -#ssl_ca_file= - -# SSL certificate of API server (string value) -#ssl_cert_file= - -# SSL private key of API server (string value) -#ssl_key_file= - -# Sets the value of TCP_KEEPIDLE in seconds for each server -# socket. Not supported on OS X. (integer value) -#tcp_keepidle=600 - - -# -# Options defined in gceapi.api -# - -# Name of network API. neutron(quantum) or nova (string value) -#network_api=neutron - -# Keystone URL (string value) -#keystone_gce_url=http://127.0.0.1:5000/v2.0 - -# name of public network (string value) -#public_network=public - -# Place of protocol files (string value) -#protocol_dir= - -# Region of this service -#region=RegionOne - - -# -# Options defined in gceapi.openstack.common.db.sqlalchemy.session -# - -# The file name to use with SQLite (string value) -#sqlite_db=gceapi.sqlite - -# If True, SQLite uses synchronous mode (boolean value) -#sqlite_synchronous=true - - -# -# Options defined in gceapi.openstack.common.eventlet_backdoor -# - -# Enable eventlet backdoor. Acceptable values are 0, <port>, -# and <start>:<end>, where 0 results in listening on a random -# tcp port number; <port> results in listening on the -# specified port number (and not enabling backdoor if that -# port is in use); and <start>:<end> results in listening on -# the smallest unused port number within the specified range -# of port numbers. The chosen port is displayed in the -# service's log file. (string value) -#backdoor_port= - - -# -# Options defined in gceapi.openstack.common.log -# - -# Print debugging output (set logging level to DEBUG instead -# of default WARNING level). (boolean value) -#debug=false - -# Print more verbose output (set logging level to INFO instead -# of default WARNING level).
(boolean value) -#verbose=false - -# Log output to standard error (boolean value) -#use_stderr=true - -# Format string to use for log messages with context (string -# value) -#logging_context_format_string=%(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(request_id)s %(user_identity)s] %(instance)s%(message)s - -# Format string to use for log messages without context -# (string value) -#logging_default_format_string=%(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s - -# Data to append to log format when level is DEBUG (string -# value) -#logging_debug_format_suffix=%(funcName)s %(pathname)s:%(lineno)d - -# Prefix each line of exception output with this format -# (string value) -#logging_exception_prefix=%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s %(instance)s - -# List of logger=LEVEL pairs (list value) -#default_log_levels=amqp=WARN,amqplib=WARN,boto=WARN,qpid=WARN,sqlalchemy=WARN,suds=INFO,iso8601=WARN,requests.packages.urllib3.connectionpool=WARN - -# Publish error events (boolean value) -#publish_errors=false - -# Make deprecations fatal (boolean value) -#fatal_deprecations=false - -# If an instance is passed with the log message, format it -# like this (string value) -#instance_format="[instance: %(uuid)s] " - -# If an instance UUID is passed with the log message, format -# it like this (string value) -#instance_uuid_format="[instance: %(uuid)s] " - -# The name of logging configuration file. It does not disable -# existing loggers, but just appends specified logging -# configuration to any other existing logging options. Please -# see the Python logging module documentation for details on -# logging configuration files. (string value) -#log_config_append= - -# DEPRECATED. A logging.Formatter log message format string -# which may use any of the available logging.LogRecord -# attributes. This option is deprecated. Please use -# logging_context_format_string and -# logging_default_format_string instead. (string value) -#log_format= - -# Format string for %%(asctime)s in log records. Default: -# %(default)s (string value) -#log_date_format=%Y-%m-%d %H:%M:%S - -# (Optional) Name of log file to output to. If no default is -# set, logging will go to stdout. (string value) -#log_file= - -# (Optional) The base directory used for relative --log-file -# paths (string value) -#log_dir= - -# Use syslog for logging. Existing syslog format is DEPRECATED -# during I, and then will be changed in J to honor RFC5424 -# (boolean value) -#use_syslog=false - -# (Optional) Use syslog rfc5424 format for logging. If -# enabled, will add APP-NAME (RFC5424) before the MSG part of -# the syslog message. The old format without APP-NAME is -# deprecated in I, and will be removed in J. 
(boolean value) -#use_syslog_rfc_format=false - -# Syslog facility to receive log lines (string value) -#syslog_log_facility=LOG_USER - - -[database] - -# -# Options defined in gceapi.openstack.common.db.api -# - -# The backend to use for db (string value) -#backend=sqlalchemy - - -# -# Options defined in gceapi.openstack.common.db.sqlalchemy.session -# - -# The SQLAlchemy connection string used to connect to the -# database (string value) -#connection=sqlite:////gceapi/openstack/common/db/$sqlite_db - -# The SQLAlchemy connection string used to connect to the -# slave database (string value) -#slave_connection= - -# Timeout before idle sql connections are reaped (integer -# value) -#idle_timeout=3600 - -# Minimum number of SQL connections to keep open in a pool -# (integer value) -#min_pool_size=1 - -# Maximum number of SQL connections to keep open in a pool -# (integer value) -#max_pool_size= - -# Maximum db connection retries during startup. (setting -1 -# implies an infinite retry count) (integer value) -#max_retries=10 - -# Interval between retries of opening a sql connection -# (integer value) -#retry_interval=10 - -# If set, use this value for max_overflow with sqlalchemy -# (integer value) -#max_overflow= - -# Verbosity of SQL debugging information. 0=None, -# 100=Everything (integer value) -#connection_debug=0 - -# Add python stack traces to SQL as comment strings (boolean -# value) -#connection_trace=false - -# If set, use this value for pool_timeout with sqlalchemy -# (integer value) -#pool_timeout= - - diff --git a/gceapi/api/__init__.py b/gceapi/api/__init__.py index 4569432..7523c3a 100644 --- a/gceapi/api/__init__.py +++ b/gceapi/api/__init__.py @@ -12,7 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from oslo.config import cfg +from oslo_config import cfg +from oslo_log import log as logging from gceapi.api import addresses from gceapi.api import discovery @@ -29,7 +30,6 @@ from gceapi.api import regions from gceapi.api import routes from gceapi.api import snapshots from gceapi.api import zones -from gceapi.openstack.common import log as logging from gceapi import wsgi from gceapi import wsgi_ext as openstack_api diff --git a/gceapi/api/address_neutron_api.py b/gceapi/api/address_neutron_api.py index 96871fc..6148dd2 100644 --- a/gceapi/api/address_neutron_api.py +++ b/gceapi/api/address_neutron_api.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from oslo.config import cfg +from oslo_config import cfg from gceapi.api import base_api from gceapi.api import clients @@ -21,7 +21,7 @@ from gceapi.api import operation_util from gceapi.api import region_api from gceapi.api import scopes from gceapi import exception -from gceapi.openstack.common.gettextutils import _ +from gceapi.i18n import _ CONF = cfg.CONF diff --git a/gceapi/api/address_nova_api.py b/gceapi/api/address_nova_api.py index e8ab890..73a6b0b 100644 --- a/gceapi/api/address_nova_api.py +++ b/gceapi/api/address_nova_api.py @@ -19,7 +19,7 @@ from gceapi.api import region_api from gceapi.api import scopes from gceapi.api import utils from gceapi import exception -from gceapi.openstack.common.gettextutils import _ +from gceapi.i18n import _ class API(base_api.API): diff --git a/gceapi/api/base_api.py b/gceapi/api/base_api.py index e445d33..f3a7278 100644 --- a/gceapi/api/base_api.py +++ b/gceapi/api/base_api.py @@ -18,11 +18,11 @@ Classes in this layer aggregate functionality of OpenStack necessary and sufficient to handle supported GCE API requests """ -from oslo.config import cfg +from oslo_config import cfg +from oslo_utils import timeutils from gceapi import db from gceapi import exception -from gceapi.openstack.common import timeutils FLAGS = cfg.CONF diff --git a/gceapi/api/clients.py b/gceapi/api/clients.py index 8262e44..6fcd78d 100644 --- a/gceapi/api/clients.py +++ b/gceapi/api/clients.py @@ -1,5 +1,3 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain @@ -15,11 +13,11 @@ from keystoneclient.v2_0 import client as kc from novaclient import client as novaclient -from novaclient import shell as novashell -from oslo.config import cfg +from novaclient import exceptions as nova_exception +from oslo_config import cfg +from oslo_log import log as logging -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import log as logging +from gceapi.i18n import _, _LW logger = logging.getLogger(__name__) @@ -43,26 +41,25 @@ except ImportError: logger.info(_('glanceclient not available')) +# Nova API version with microversions support +REQUIRED_NOVA_API_VERSION = '2.1' +LEGACY_NOVA_API_VERSION = '2' +# Nova API's 2.3 microversion provides additional EC2 compliant instance +# properties +REQUIRED_NOVA_API_MICROVERSION = '2.3' +_nova_api_version = None + + def nova(context, service_type='compute'): - computeshell = novashell.OpenStackComputeShell() - extensions = computeshell._discover_extensions("1.1") - args = { - 'project_id': context.project_id, 'auth_url': CONF.keystone_gce_url, - 'service_type': service_type, - 'username': None, - 'api_key': None, - 'extensions': extensions, + 'auth_token': context.auth_token, + 'bypass_url': url_for(context, service_type), } - - client = novaclient.Client(1.1, **args) - - management_url = get_endpoint(context, service_type) - client.client.auth_token = context.auth_token - client.client.management_url = management_url - - return client + global _nova_api_version + if not _nova_api_version: + _nova_api_version = _get_nova_api_version(context) + return novaclient.Client(_nova_api_version, **args) def neutron(context): @@ -73,7 +70,7 @@ def neutron(context): 'auth_url': CONF.keystone_gce_url, 'service_type': 'network', 'token': context.auth_token, - 'endpoint_url': get_endpoint(context, 'network'), + 'endpoint_url': url_for(context, 'network'), } return 
neutronclient.Client(**args) @@ -90,7 +87,7 @@ def glance(context): } return glanceclient.Client( - "1", endpoint=get_endpoint(context, 'image'), **args) + "1", endpoint=url_for(context, 'image'), **args) def cinder(context): @@ -105,7 +102,7 @@ def cinder(context): } _cinder = cinderclient.Client('1', **args) - management_url = get_endpoint(context, 'volume') + management_url = url_for(context, 'volume') _cinder.client.auth_token = context.auth_token _cinder.client.management_url = management_url @@ -113,33 +110,47 @@ def cinder(context): def keystone(context): - _keystone = kc.Client( + return kc.Client( token=context.auth_token, + project_id=context.project_id, tenant_id=context.project_id, auth_url=CONF.keystone_gce_url) - return _keystone + +def url_for(context, service_type): + service_catalog = context.service_catalog + if not service_catalog: + catalog = keystone(context).service_catalog.catalog + service_catalog = catalog['serviceCatalog'] + context.service_catalog = service_catalog + return get_url_from_catalog(service_catalog, service_type) -def get_endpoint_from_catalog(service_catalog, service_type): +def get_url_from_catalog(service_catalog, service_type): for service in service_catalog: - if service["type"] != service_type: + if service['type'] != service_type: continue - for endpoint in service["endpoints"]: - if endpoint["region"] != CONF["region"]: - continue - return endpoint.get("publicURL") - - return None + for endpoint in service['endpoints']: + if 'publicURL' in endpoint: + return endpoint['publicURL'] + elif endpoint.get('interface') == 'public': + # NOTE(andrey-mp): keystone v3 + return endpoint['url'] + else: + return None return None -def get_endpoint(context, service_type): - service_catalog = context.service_catalog - if not service_catalog: - catalog = keystone(context).service_catalog.catalog - service_catalog = catalog["serviceCatalog"] - context.service_catalog = service_catalog +def _get_nova_api_version(context): + try: + novaclient.Client(REQUIRED_NOVA_API_VERSION) + except nova_exception.UnsupportedVersion: + logger.warning( + _LW('Nova client does not support v2.1 Nova API, use v2 instead. 
' + 'A lot of useful EC2 compliant instance properties ' + 'will be unavailable.')) + return LEGACY_NOVA_API_VERSION - return get_endpoint_from_catalog(service_catalog, service_type) + # NOTE(ft): novaclient supports microversions, use the last required one + return REQUIRED_NOVA_API_MICROVERSION diff --git a/gceapi/api/common.py b/gceapi/api/common.py index 6e88899..b2d5b66 100644 --- a/gceapi/api/common.py +++ b/gceapi/api/common.py @@ -18,7 +18,9 @@ import os.path import re from webob import exc -from oslo.config import cfg +from oslo_config import cfg +from oslo_log import log as logging +from oslo_utils import timeutils from gceapi.api import clients from gceapi.api import operation_api @@ -26,9 +28,7 @@ from gceapi.api import operation_util from gceapi.api import scopes from gceapi.api import utils from gceapi import exception -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import log as logging -from gceapi.openstack.common import timeutils +from gceapi.i18n import _ LOG = logging.getLogger(__name__) FLAGS = cfg.CONF @@ -268,7 +268,7 @@ class Controller(object): """ context = self._get_context(request) - public_url = clients.get_endpoint(context, "gceapi") + public_url = clients.url_for(context, "gceapi") if public_url: public_url = public_url.rstrip("/") + "/"\ + request.script_name.lstrip("/") diff --git a/gceapi/api/discovery.py b/gceapi/api/discovery.py index de70343..989bf4f 100644 --- a/gceapi/api/discovery.py +++ b/gceapi/api/discovery.py @@ -18,10 +18,10 @@ import threading import webob from keystoneclient.v2_0 import client as keystone_client -from oslo.config import cfg +from oslo_config import cfg +from oslo_log import log as logging from gceapi.api import clients -from gceapi.openstack.common import log as logging from gceapi import wsgi_ext as openstack_wsgi LOG = logging.getLogger(__name__) @@ -46,7 +46,7 @@ class Controller(object): keystone = keystone_client.Client(username=user, password=password, tenant_name=tenant, auth_url=FLAGS.keystone_gce_url) catalog = keystone.service_catalog.catalog["serviceCatalog"] - public_url = clients.get_endpoint_from_catalog(catalog, "gceapi") + public_url = clients.get_url_from_catalog(catalog, "gceapi") if not public_url: public_url = req.host_url public_url = public_url.rstrip("/") diff --git a/gceapi/api/firewall_api.py b/gceapi/api/firewall_api.py index e4257ec..8517397 100644 --- a/gceapi/api/firewall_api.py +++ b/gceapi/api/firewall_api.py @@ -14,14 +14,15 @@ import copy +from oslo_log import log as logging + from gceapi.api import base_api from gceapi.api import clients from gceapi.api import network_api from gceapi.api import operation_util from gceapi.api import utils from gceapi import exception -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import log as logging +from gceapi.i18n import _ PROTOCOL_MAP = { @@ -175,8 +176,8 @@ class API(base_api.API): if not too_complex_for_gce: sourceRanges = [cidr for cidr in grouped_rules] or ["0.0.0.0/0"] if common_rules: - allowed = [_build_gce_port_rule(proto, common_rules[proto]) - for proto in common_rules] + allowed = [_build_gce_port_rule(p, common_rules[p]) + for p in common_rules] firewall["sourceRanges"] = sourceRanges firewall["allowed"] = allowed diff --git a/gceapi/api/image_api.py b/gceapi/api/image_api.py index 1e95d13..b771f16 100644 --- a/gceapi/api/image_api.py +++ b/gceapi/api/image_api.py @@ -23,7 +23,7 @@ from gceapi.api import operation_api from gceapi.api import operation_util from gceapi.api 
import utils from gceapi import exception -from gceapi.openstack.common.gettextutils import _ +from gceapi.i18n import _ class API(base_api.API): diff --git a/gceapi/api/instance_address_api.py b/gceapi/api/instance_address_api.py index 82edd96..0c1992f 100644 --- a/gceapi/api/instance_address_api.py +++ b/gceapi/api/instance_address_api.py @@ -12,12 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +from oslo_log import log as logging + from gceapi.api import base_api from gceapi.api import clients from gceapi.api import operation_util from gceapi import exception -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import log as logging +from gceapi.i18n import _ LOG = logging.getLogger(__name__) diff --git a/gceapi/api/instance_api.py b/gceapi/api/instance_api.py index 5cc48f0..187e946 100644 --- a/gceapi/api/instance_api.py +++ b/gceapi/api/instance_api.py @@ -15,6 +15,8 @@ import random import string +from oslo_log import log as logging + from gceapi.api import base_api from gceapi.api import clients from gceapi.api import disk_api @@ -29,8 +31,7 @@ from gceapi.api import project_api from gceapi.api import scopes from gceapi.api import utils from gceapi import exception -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import log as logging +from gceapi.i18n import _ LOG = logging.getLogger(__name__) diff --git a/gceapi/api/instance_disk_api.py b/gceapi/api/instance_disk_api.py index b052cf5..c70fe33 100644 --- a/gceapi/api/instance_disk_api.py +++ b/gceapi/api/instance_disk_api.py @@ -14,6 +14,8 @@ import string +from oslo_log import log as logging + from gceapi.api import base_api from gceapi.api import clients from gceapi.api import disk_api @@ -21,8 +23,7 @@ from gceapi.api import operation_api from gceapi.api import operation_util from gceapi.api import utils from gceapi import exception -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import log as logging +from gceapi.i18n import _ LOG = logging.getLogger(__name__) diff --git a/gceapi/api/instances.py b/gceapi/api/instances.py index a635cba..02da348 100644 --- a/gceapi/api/instances.py +++ b/gceapi/api/instances.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from oslo_log import log as logging import webob from gceapi.api import common as gce_common @@ -22,8 +23,7 @@ from gceapi.api import operation_util from gceapi.api import scopes from gceapi.api import wsgi as gce_wsgi from gceapi import exception -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import log as logging +from gceapi.i18n import _ logger = logging.getLogger(__name__) diff --git a/gceapi/api/network_neutron_api.py b/gceapi/api/network_neutron_api.py index 46608c0..99cdd25 100644 --- a/gceapi/api/network_neutron_api.py +++ b/gceapi/api/network_neutron_api.py @@ -13,14 +13,14 @@ # limitations under the License. 
import netaddr -from oslo.config import cfg +from oslo_config import cfg +from oslo_log import log as logging from gceapi.api import base_api from gceapi.api import clients from gceapi.api import operation_util from gceapi import exception -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import log as logging +from gceapi.i18n import _ CONF = cfg.CONF diff --git a/gceapi/api/network_nova_api.py b/gceapi/api/network_nova_api.py index 91c3b6a..e896bc8 100644 --- a/gceapi/api/network_nova_api.py +++ b/gceapi/api/network_nova_api.py @@ -19,7 +19,7 @@ from gceapi.api import clients from gceapi.api import operation_util from gceapi.api import utils from gceapi import exception -from gceapi.openstack.common.gettextutils import _ +from gceapi.i18n import _ class API(base_api.API): diff --git a/gceapi/api/oauth.py b/gceapi/api/oauth.py index 11ac30a..343f33b 100644 --- a/gceapi/api/oauth.py +++ b/gceapi/api/oauth.py @@ -19,12 +19,12 @@ import uuid from keystoneclient import exceptions from keystoneclient.v2_0 import client as keystone_client -from oslo.config import cfg +from oslo_config import cfg +from oslo_log import log as logging +from oslo_utils import timeutils import webob -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import log as logging -from gceapi.openstack.common import timeutils +from gceapi.i18n import _ from gceapi import wsgi_ext as openstack_wsgi FLAGS = cfg.CONF @@ -76,7 +76,7 @@ class Controller(object): ""\ "" - class Client: + class Client(object): auth_start_time = 0 auth_token = None expires_in = 1 diff --git a/gceapi/api/operation_api.py b/gceapi/api/operation_api.py index 81bfe99..81ae45b 100644 --- a/gceapi/api/operation_api.py +++ b/gceapi/api/operation_api.py @@ -14,11 +14,12 @@ import uuid +from oslo_utils import timeutils + from gceapi.api import base_api from gceapi.api import scopes from gceapi import exception -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import timeutils +from gceapi.i18n import _ class API(base_api.API): diff --git a/gceapi/api/operation_util.py b/gceapi/api/operation_util.py index c572be3..be11f61 100644 --- a/gceapi/api/operation_util.py +++ b/gceapi/api/operation_util.py @@ -14,9 +14,10 @@ import threading +from oslo_utils import timeutils + from gceapi.api import operation_api -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import timeutils +from gceapi.i18n import _ def init_operation(context, op_type, target_type, target_name, scope): diff --git a/gceapi/api/project_api.py b/gceapi/api/project_api.py index f1e8218..7a7c737 100644 --- a/gceapi/api/project_api.py +++ b/gceapi/api/project_api.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from oslo.config import cfg +from oslo_config import cfg from gceapi.api import base_api from gceapi.api import clients diff --git a/gceapi/api/projects.py b/gceapi/api/projects.py index 873f219..448f4ae 100644 --- a/gceapi/api/projects.py +++ b/gceapi/api/projects.py @@ -21,7 +21,7 @@ from gceapi.api import operation_util from gceapi.api import project_api from gceapi.api import scopes from gceapi.api import wsgi as gce_wsgi -from gceapi.openstack.common.gettextutils import _ +from gceapi.i18n import _ class Controller(gce_common.Controller): diff --git a/gceapi/api/region_api.py b/gceapi/api/region_api.py index 3133a1e..3c2b0e1 100644 --- a/gceapi/api/region_api.py +++ b/gceapi/api/region_api.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from oslo.config import cfg +from oslo_config import cfg from gceapi.api import base_api from gceapi.api import scopes diff --git a/gceapi/api/route_neutron_api.py b/gceapi/api/route_neutron_api.py index 3eabf5d..64ca75a 100644 --- a/gceapi/api/route_neutron_api.py +++ b/gceapi/api/route_neutron_api.py @@ -21,7 +21,7 @@ from gceapi.api import network_api from gceapi.api import operation_util from gceapi.api import utils from gceapi import exception -from gceapi.openstack.common.gettextutils import _ +from gceapi.i18n import _ ALL_IP_CIDR = "0.0.0.0/0" diff --git a/gceapi/api/route_nova_api.py b/gceapi/api/route_nova_api.py index aef96f6..b09cc51 100644 --- a/gceapi/api/route_nova_api.py +++ b/gceapi/api/route_nova_api.py @@ -14,7 +14,7 @@ from gceapi.api import base_api from gceapi import exception -from gceapi.openstack.common.gettextutils import _ +from gceapi.i18n import _ NOT_SUPPORTED_MESSAGE = _("Routes are not supported with nova network") diff --git a/gceapi/api/wsgi.py b/gceapi/api/wsgi.py index ca07be6..cb4611e 100644 --- a/gceapi/api/wsgi.py +++ b/gceapi/api/wsgi.py @@ -12,12 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +from oslo_log import log as logging +from oslo_serialization import jsonutils import webob from gceapi import exception -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import jsonutils -from gceapi.openstack.common import log as logging +from gceapi.i18n import _ from gceapi import wsgi_ext as openstack_wsgi LOG = logging.getLogger(__name__) diff --git a/gceapi/auth.py b/gceapi/auth.py index 44ed330..861b6e5 100644 --- a/gceapi/auth.py +++ b/gceapi/auth.py @@ -18,14 +18,14 @@ Common Auth Middleware. """ -from oslo.config import cfg +from oslo_config import cfg +from oslo_log import log as logging +from oslo_serialization import jsonutils import webob.dec import webob.exc from gceapi import context -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import jsonutils -from gceapi.openstack.common import log as logging +from gceapi.i18n import _ from gceapi import wsgi diff --git a/gceapi/cmd/__init__.py b/gceapi/cmd/__init__.py index 1e8cb71..e69de29 100644 --- a/gceapi/cmd/__init__.py +++ b/gceapi/cmd/__init__.py @@ -1,19 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2013 OpenStack LLC. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from gceapi.openstack.common import gettextutils -gettextutils.install('gceapi') diff --git a/gceapi/cmd/api.py b/gceapi/cmd/api.py index 6fb019a..6382916 100644 --- a/gceapi/cmd/api.py +++ b/gceapi/cmd/api.py @@ -1,48 +1,37 @@ -#!/usr/bin/env python -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 OpenStack LLC. -# All Rights Reserved. +# Copyright 2014 +# The Cloudscaling Group, Inc. # -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. """ -Gceapi API Server +GCEapi API Server """ -import eventlet import sys -eventlet.patcher.monkey_patch(os=False) - -from oslo.config import cfg +from oslo_config import cfg +from oslo_log import log as logging from gceapi import config -from gceapi.openstack.common import log as logging from gceapi import service CONF = cfg.CONF -CONF.import_opt('use_ssl', 'gceapi.service') def main(): config.parse_args(sys.argv) - logging.setup('gceapi') + logging.setup(CONF, 'gceapi') - server = service.WSGIService( - 'gce', use_ssl=CONF.use_ssl, max_url_len=16384) + server = service.WSGIService('gceapi', max_url_len=16384) service.serve(server) service.wait() diff --git a/gceapi/cmd/manage.py b/gceapi/cmd/manage.py index 9b7311e..a7d2d58 100644 --- a/gceapi/cmd/manage.py +++ b/gceapi/cmd/manage.py @@ -1,29 +1,31 @@ -# Copyright 2014 -# The Cloudscaling Group, Inc. +# Copyright 2013 Cloudscaling Group, Inc # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. """ CLI interface for GCE API management. """ +import os import sys -from oslo.config import cfg +from oslo_config import cfg +from oslo_log import log +from gceapi import config from gceapi.db import migration -from gceapi.openstack.common import log -from gceapi import version +from gceapi.i18n import _ CONF = cfg.CONF @@ -35,9 +37,9 @@ def do_db_version(): def do_db_sync(): - """ - Place a database under migration control and upgrade, - creating first if necessary. + """Place a database under migration control and upgrade, + + creating if necessary. """ migration.db_sync(CONF.command.version) @@ -61,13 +63,20 @@ command_opt = cfg.SubCommandOpt('command', def main(): CONF.register_cli_opt(command_opt) try: - default_config_files = cfg.find_config_files('gceapi') - CONF(sys.argv[1:], project='gceapi', prog='gce-api-manage', - version=version.version_info.version_string(), - default_config_files=default_config_files) - log.setup("gceapi") - except RuntimeError as e: - sys.exit("ERROR: %s" % e) + config.parse_args(sys.argv) + log.setup(CONF, "gceapi") + except cfg.ConfigFilesNotFoundError: + cfgfile = CONF.config_file[-1] if CONF.config_file else None + if cfgfile and not os.access(cfgfile, os.R_OK): + st = os.stat(cfgfile) + print(_("Could not read %s. Re-running with sudo") % cfgfile) + try: + os.execvp('sudo', ['sudo', '-u', '#%s' % st.st_uid] + sys.argv) + except Exception: + print(_('sudo failed, continuing as if nothing happened')) + + print(_('Please re-run gce-api-manage as root.')) + return(2) try: CONF.command.func() diff --git a/gceapi/config.py b/gceapi/config.py index a749526..9be4f38 100644 --- a/gceapi/config.py +++ b/gceapi/config.py @@ -1,34 +1,49 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# Copyright 2012 Red Hat, Inc. +# Copyright 2014 +# The Cloudscaling Group, Inc. # -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
+# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. -from oslo.config import cfg +from oslo_config import cfg +from oslo_db import options +from oslo_log import log -from gceapi.openstack.common.db.sqlalchemy import session as db_session from gceapi import paths from gceapi import version -_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('$sqlite_db') + +CONF = cfg.CONF + +_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('gceapi.sqlite') + +_DEFAULT_LOG_LEVELS = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', + 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', + 'oslo.messaging=INFO', 'iso8601=WARN', + 'requests.packages.urllib3.connectionpool=WARN', + 'urllib3.connectionpool=WARN', 'websocket=WARN', + 'keystonemiddleware=WARN', 'routes.middleware=WARN', + 'stevedore=WARN', 'keystoneclient.auth=WARN'] + +_DEFAULT_LOGGING_CONTEXT_FORMAT = ('%(asctime)s.%(msecs)03d %(process)d ' + '%(levelname)s %(name)s [%(request_id)s ' + '%(user_identity)s] %(instance)s' + '%(message)s') def parse_args(argv, default_config_files=None): - db_session.set_defaults(sql_connection=_DEFAULT_SQL_CONNECTION, - sqlite_db='gceapi.sqlite') + log.set_defaults(_DEFAULT_LOGGING_CONTEXT_FORMAT, _DEFAULT_LOG_LEVELS) + log.register_options(CONF) + options.set_defaults(CONF, connection=_DEFAULT_SQL_CONNECTION, + sqlite_db='gceapi.sqlite') + cfg.CONF(argv[1:], project='gceapi', version=version.version_info.version_string(), diff --git a/gceapi/context.py b/gceapi/context.py index 6df6b50..fa9fba5 100644 --- a/gceapi/context.py +++ b/gceapi/context.py @@ -21,11 +21,12 @@ import uuid +from oslo_log import log as logging +from oslo_utils import timeutils + from gceapi import exception -from gceapi.openstack.common.gettextutils import _ +from gceapi.i18n import _ from gceapi.openstack.common import local -from gceapi.openstack.common import log as logging -from gceapi.openstack.common import timeutils LOG = logging.getLogger(__name__) diff --git a/gceapi/db/api.py b/gceapi/db/api.py index beb3fb1..c2566c2 100644 --- a/gceapi/db/api.py +++ b/gceapi/db/api.py @@ -27,11 +27,56 @@ functions from gceapi.db namespace, not the gceapi.db.api namespace. """ -from gceapi.openstack.common.db import api as db_api +from eventlet import tpool +from oslo_config import cfg +from oslo_db import api as db_api +from oslo_log import log as logging +tpool_opts = [ + cfg.BoolOpt('use_tpool', + default=False, + deprecated_name='dbapi_use_tpool', + deprecated_group='DEFAULT', + help='Enable the experimental use of thread pooling for ' + 'all DB API calls'), +] + +CONF = cfg.CONF +CONF.register_opts(tpool_opts, 'database') + _BACKEND_MAPPING = {'sqlalchemy': 'gceapi.db.sqlalchemy.api'} -IMPL = db_api.DBAPI(backend_mapping=_BACKEND_MAPPING) + + +class GCEDBAPI(object): + """gce's DB API wrapper class. + + This wraps the oslo DB API with an option to be able to use eventlet's + thread pooling. Since the CONF variable may not be loaded at the time + this class is instantiated, we must look at it on the first DB API call. 
+ """ + + def __init__(self): + self.__db_api = None + + @property + def _db_api(self): + if not self.__db_api: + gce_db_api = db_api.DBAPI(CONF.database.backend, + backend_mapping=_BACKEND_MAPPING) + if CONF.database.use_tpool: + self.__db_api = tpool.Proxy(gce_db_api) + else: + self.__db_api = gce_db_api + return self.__db_api + + def __getattr__(self, key): + return getattr(self._db_api, key) + + +IMPL = GCEDBAPI() + +LOG = logging.getLogger(__name__) def add_item(context, kind, data): diff --git a/gceapi/db/migration.py b/gceapi/db/migration.py index 408b1ff..69e0efe 100644 --- a/gceapi/db/migration.py +++ b/gceapi/db/migration.py @@ -1,23 +1,23 @@ -# Copyright 2014 -# The Cloudscaling Group, Inc. +# Copyright 2013 Cloudscaling Group, Inc # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. """Database setup and migration commands.""" -from oslo.config import cfg +from oslo_config import cfg from gceapi import exception -from gceapi.openstack.common.gettextutils import _ +from gceapi.i18n import _ CONF = cfg.CONF @@ -60,14 +60,17 @@ IMPL = LazyPluggable('backend', config_group='database', sqlalchemy='gceapi.db.sqlalchemy.migration') -INIT_VERSION = 0 - def db_sync(version=None): """Migrate the database to `version` or the most recent version.""" - return IMPL.db_sync(INIT_VERSION, version=version) + return IMPL.db_sync(version=version) def db_version(): """Display the current database version.""" - return IMPL.db_version(INIT_VERSION) + return IMPL.db_version() + + +def db_initial_version(): + """The starting version for the database.""" + return IMPL.db_initial_version() diff --git a/gceapi/db/sqlalchemy/__init__.py b/gceapi/db/sqlalchemy/__init__.py index 966a69b..e69de29 100644 --- a/gceapi/db/sqlalchemy/__init__.py +++ b/gceapi/db/sqlalchemy/__init__.py @@ -1,13 +0,0 @@ -# Copyright 2014 -# The Cloudscaling Group, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/gceapi/db/sqlalchemy/api.py b/gceapi/db/sqlalchemy/api.py index 2d3f3f8..d4ab8fe 100644 --- a/gceapi/db/sqlalchemy/api.py +++ b/gceapi/db/sqlalchemy/api.py @@ -18,18 +18,34 @@ import ast import functools import sys -from oslo.config import cfg +from oslo_config import cfg +from oslo_db.sqlalchemy import session as db_session import gceapi.context from gceapi.db.sqlalchemy import models -from gceapi.openstack.common.db.sqlalchemy import session as db_session CONF = cfg.CONF -CONF.import_opt('connection', - 'gceapi.openstack.common.db.sqlalchemy.session', - group='database') -get_session = db_session.get_session + +_MASTER_FACADE = None + + +def _create_facade_lazily(): + global _MASTER_FACADE + + if _MASTER_FACADE is None: + _MASTER_FACADE = db_session.EngineFacade.from_config(CONF) + return _MASTER_FACADE + + +def get_engine(): + facade = _create_facade_lazily() + return facade.get_engine() + + +def get_session(**kwargs): + facade = _create_facade_lazily() + return facade.get_session(**kwargs) def get_backend(): diff --git a/gceapi/db/sqlalchemy/migrate_repo/__init__.py b/gceapi/db/sqlalchemy/migrate_repo/__init__.py index 966a69b..e69de29 100644 --- a/gceapi/db/sqlalchemy/migrate_repo/__init__.py +++ b/gceapi/db/sqlalchemy/migrate_repo/__init__.py @@ -1,13 +0,0 @@ -# Copyright 2014 -# The Cloudscaling Group, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/gceapi/db/sqlalchemy/migrate_repo/versions/__init__.py b/gceapi/db/sqlalchemy/migrate_repo/versions/__init__.py index 966a69b..e69de29 100644 --- a/gceapi/db/sqlalchemy/migrate_repo/versions/__init__.py +++ b/gceapi/db/sqlalchemy/migrate_repo/versions/__init__.py @@ -1,13 +0,0 @@ -# Copyright 2014 -# The Cloudscaling Group, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/gceapi/db/sqlalchemy/migration.py b/gceapi/db/sqlalchemy/migration.py index 73d3123..7f54c82 100644 --- a/gceapi/db/sqlalchemy/migration.py +++ b/gceapi/db/sqlalchemy/migration.py @@ -12,19 +12,75 @@ # See the License for the specific language governing permissions and # limitations under the License. 
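The rewrite of gceapi/db/sqlalchemy/migration.py that follows drives sqlalchemy-migrate directly instead of going through the deleted oslo-incubator helper. For orientation, the underlying versioning calls it builds on, as a standalone sketch (the engine URL and repository path are illustrative):

    import sqlalchemy
    from migrate.versioning import api as versioning_api
    from migrate.versioning.repository import Repository

    engine = sqlalchemy.create_engine('sqlite:///gceapi.sqlite')
    repo = Repository('gceapi/db/sqlalchemy/migrate_repo')

    versioning_api.version_control(engine, repo, 0)  # stamp an empty DB at INIT_VERSION
    versioning_api.upgrade(engine, repo, None)       # None upgrades to the latest script
    print(versioning_api.db_version(engine, repo))   # what db_version() now reports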
+ import os -from gceapi.openstack.common.db.sqlalchemy import migration +from migrate import exceptions as versioning_exceptions +from migrate.versioning import api as versioning_api +from migrate.versioning.repository import Repository +import sqlalchemy + +from gceapi.db.sqlalchemy import api as db_session +from gceapi import exception +from gceapi.i18n import _ + +INIT_VERSION = 0 +_REPOSITORY = None + +get_engine = db_session.get_engine -def db_sync(init_version, version=None): - return migration.db_sync(_get_repo_path(), version, init_version) +def db_sync(version=None): + if version is not None: + try: + version = int(version) + except ValueError: + raise exception.GceapiException(_("version should be an integer")) + + current_version = db_version() + repository = _find_migrate_repo() + if version is None or version > current_version: + return versioning_api.upgrade(get_engine(), repository, version) + else: + return versioning_api.downgrade(get_engine(), repository, + version) -def db_version(init_version): - return migration.db_version(_get_repo_path(), init_version) +def db_version(): + repository = _find_migrate_repo() + try: + return versioning_api.db_version(get_engine(), repository) + except versioning_exceptions.DatabaseNotControlledError: + meta = sqlalchemy.MetaData() + engine = get_engine() + meta.reflect(bind=engine) + tables = meta.tables + if len(tables) == 0: + db_version_control(INIT_VERSION) + return versioning_api.db_version(get_engine(), repository) + else: + # Some pre-Essex DB's may not be version controlled. + # Require them to upgrade using Essex first. + raise exception.GceapiException( + _("Upgrade DB using Essex release first.")) -def _get_repo_path(): - return os.path.join(os.path.abspath(os.path.dirname(__file__)), +def db_initial_version(): + return INIT_VERSION + + +def db_version_control(version=None): + repository = _find_migrate_repo() + versioning_api.version_control(get_engine(), repository, version) + return version + + +def _find_migrate_repo(): + """Get the path for the migrate repository.""" + global _REPOSITORY + path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'migrate_repo') + assert os.path.exists(path) + if _REPOSITORY is None: + _REPOSITORY = Repository(path) + return _REPOSITORY diff --git a/gceapi/db/sqlalchemy/models.py b/gceapi/db/sqlalchemy/models.py index f74293d..57b6de2 100644 --- a/gceapi/db/sqlalchemy/models.py +++ b/gceapi/db/sqlalchemy/models.py @@ -16,11 +16,10 @@ SQLAlchemy models for gceapi data. """ +from oslo_db.sqlalchemy import models from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import Column, Index, PrimaryKeyConstraint, String, Text -from gceapi.openstack.common.db.sqlalchemy import models - BASE = declarative_base() diff --git a/gceapi/exception.py b/gceapi/exception.py index 5439c7a..88787ae 100644 --- a/gceapi/exception.py +++ b/gceapi/exception.py @@ -26,11 +26,12 @@ SHOULD include dedicated exception logging. import sys -from oslo.config import cfg +from oslo_config import cfg +from oslo_log import log as logging + +from gceapi.i18n import _ import webob.exc -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import log as logging LOG = logging.getLogger(__name__) diff --git a/gceapi/i18n.py b/gceapi/i18n.py new file mode 100644 index 0000000..cc0860f --- /dev/null +++ b/gceapi/i18n.py @@ -0,0 +1,46 @@ +# Copyright 2014 IBM Corp. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""oslo.i18n integration module. + +See http://docs.openstack.org/developer/oslo.i18n/usage.html . + +""" + +import oslo_i18n + +DOMAIN = 'gce-api' + +_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN) + +# The primary translation function using the well-known name "_" +_ = _translators.primary + +# Translators for log levels. +# +# The abbreviated names are meant to reflect the usual use of a short +# name like '_'. The "L" is for "log" and the other letter comes from +# the level. +_LI = _translators.log_info +_LW = _translators.log_warning +_LE = _translators.log_error +_LC = _translators.log_critical + + +def translate(value, user_locale): + return oslo_i18n.translate(value, user_locale) + + +def get_available_languages(): + return oslo_i18n.get_available_languages(DOMAIN) diff --git a/gceapi/openstack/common/_i18n.py b/gceapi/openstack/common/_i18n.py new file mode 100644 index 0000000..161e463 --- /dev/null +++ b/gceapi/openstack/common/_i18n.py @@ -0,0 +1,45 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""oslo.i18n integration module. + +See http://docs.openstack.org/developer/oslo.i18n/usage.html + +""" + +try: + import oslo_i18n + + # NOTE(dhellmann): This reference to o-s-l-o will be replaced by the + # application name when this module is synced into the separate + # repository. It is OK to have more than one translation function + # using the same domain, since there will still only be one message + # catalog. + _translators = oslo_i18n.TranslatorFactory(domain='gceapi') + + # The primary translation function using the well-known name "_" + _ = _translators.primary + + # Translators for log levels. + # + # The abbreviated names are meant to reflect the usual use of a short + # name like '_'. The "L" is for "log" and the other letter comes from + # the level. + _LI = _translators.log_info + _LW = _translators.log_warning + _LE = _translators.log_error + _LC = _translators.log_critical +except ImportError: + # NOTE(dims): Support for cases where a project wants to use + # code from oslo-incubator, but is not ready to be internationalized + # (like tempest) + _ = _LI = _LW = _LE = _LC = lambda x: x diff --git a/gceapi/openstack/common/db/api.py b/gceapi/openstack/common/db/api.py deleted file mode 100644 index 5a6f9f1..0000000 --- a/gceapi/openstack/common/db/api.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (c) 2013 Rackspace Hosting -# All Rights Reserved. 
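The gceapi.i18n module added above is imported throughout the patch (gceapi/context.py and gceapi/exception.py, among others). A brief usage sketch; the function and logger names are illustrative:

    from oslo_log import log as logging

    from gceapi.i18n import _, _LW

    LOG = logging.getLogger(__name__)

    def check_quota(used, limit):
        if used > limit:
            # _LW marks the string for the warning-level catalog; pass
            # substitutions separately so translators see a single string.
            LOG.warning(_LW('Quota exceeded: %(used)d > %(limit)d'),
                        {'used': used, 'limit': limit})
            raise ValueError(_('Maximum quota exceeded'))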
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Multiple DB API backend support. - -Supported configuration options: - -The following two parameters are in the 'database' group: -`backend`: DB backend name or full module path to DB backend module. - -A DB backend module should implement a method named 'get_backend' which -takes no arguments. The method can return any object that implements DB -API methods. -""" - -from oslo.config import cfg - -from gceapi.openstack.common import importutils - - -db_opts = [ - cfg.StrOpt('backend', - default='sqlalchemy', - deprecated_name='db_backend', - deprecated_group='DEFAULT', - help='The backend to use for db'), -] - -CONF = cfg.CONF -CONF.register_opts(db_opts, 'database') - - -class DBAPI(object): - def __init__(self, backend_mapping=None): - if backend_mapping is None: - backend_mapping = {} - backend_name = CONF.database.backend - # Import the untranslated name if we don't have a - # mapping. - backend_path = backend_mapping.get(backend_name, backend_name) - backend_mod = importutils.import_module(backend_path) - self.__backend = backend_mod.get_backend() - - def __getattr__(self, key): - return getattr(self.__backend, key) diff --git a/gceapi/openstack/common/db/exception.py b/gceapi/openstack/common/db/exception.py deleted file mode 100644 index e4bafe0..0000000 --- a/gceapi/openstack/common/db/exception.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
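The custom exception hierarchy deleted below has direct counterparts in oslo.db (the oslo_db.exception module), which this patch already depends on. A hedged sketch of the replacement pattern; the session and model object are assumed:

    from oslo_db import exception as db_exc

    def create_item(session, item):
        try:
            with session.begin():
                session.add(item)
        except db_exc.DBDuplicateEntry as exc:
            # Same contract as the deleted class: exc.columns names the
            # columns of the violated unique constraint.
            raise ValueError('duplicate entry on columns: %s' % exc.columns)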
- -"""DB related custom exceptions.""" - -from gceapi.openstack.common.gettextutils import _ - - -class DBError(Exception): - """Wraps an implementation specific exception.""" - def __init__(self, inner_exception=None): - self.inner_exception = inner_exception - super(DBError, self).__init__(str(inner_exception)) - - -class DBDuplicateEntry(DBError): - """Wraps an implementation specific exception.""" - def __init__(self, columns=[], inner_exception=None): - self.columns = columns - super(DBDuplicateEntry, self).__init__(inner_exception) - - -class DBDeadlock(DBError): - def __init__(self, inner_exception=None): - super(DBDeadlock, self).__init__(inner_exception) - - -class DBInvalidUnicodeParameter(Exception): - message = _("Invalid Parameter: " - "Unicode is not supported by the current database.") - - -class DbMigrationError(DBError): - """Wraps migration specific exception.""" - def __init__(self, message=None): - super(DbMigrationError, self).__init__(str(message)) - - -class DBConnectionError(DBError): - """Wraps connection specific exception.""" - pass diff --git a/gceapi/openstack/common/db/sqlalchemy/__init__.py b/gceapi/openstack/common/db/sqlalchemy/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gceapi/openstack/common/db/sqlalchemy/migration.py b/gceapi/openstack/common/db/sqlalchemy/migration.py deleted file mode 100644 index dbac902..0000000 --- a/gceapi/openstack/common/db/sqlalchemy/migration.py +++ /dev/null @@ -1,265 +0,0 @@ -# coding: utf-8 -# -# Copyright (c) 2013 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Base on code in migrate/changeset/databases/sqlite.py which is under -# the following license: -# -# The MIT License -# -# Copyright (c) 2009 Evan Rosson, Jan Dittberner, Domen Kožar -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. 
- -import os -import re - -from migrate.changeset import ansisql -from migrate.changeset.databases import sqlite -from migrate import exceptions as versioning_exceptions -from migrate.versioning import api as versioning_api -from migrate.versioning.repository import Repository -import sqlalchemy -from sqlalchemy.schema import UniqueConstraint - -from gceapi.openstack.common.db import exception -from gceapi.openstack.common.db.sqlalchemy import session as db_session -from gceapi.openstack.common.gettextutils import _ - - -get_engine = db_session.get_engine - - -def _get_unique_constraints(self, table): - """Retrieve information about existing unique constraints of the table - - This feature is needed for _recreate_table() to work properly. - Unfortunately, it's not available in sqlalchemy 0.7.x/0.8.x. - - """ - - data = table.metadata.bind.execute( - """SELECT sql - FROM sqlite_master - WHERE - type='table' AND - name=:table_name""", - table_name=table.name - ).fetchone()[0] - - UNIQUE_PATTERN = "CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)" - return [ - UniqueConstraint( - *[getattr(table.columns, c.strip(' "')) for c in cols.split(",")], - name=name - ) - for name, cols in re.findall(UNIQUE_PATTERN, data) - ] - - -def _recreate_table(self, table, column=None, delta=None, omit_uniques=None): - """Recreate the table properly - - Unlike the corresponding original method of sqlalchemy-migrate this one - doesn't drop existing unique constraints when creating a new one. - - """ - - table_name = self.preparer.format_table(table) - - # we remove all indexes so as not to have - # problems during copy and re-create - for index in table.indexes: - index.drop() - - # reflect existing unique constraints - for uc in self._get_unique_constraints(table): - table.append_constraint(uc) - # omit given unique constraints when creating a new table if required - table.constraints = set([ - cons for cons in table.constraints - if omit_uniques is None or cons.name not in omit_uniques - ]) - - self.append('ALTER TABLE %s RENAME TO migration_tmp' % table_name) - self.execute() - - insertion_string = self._modify_table(table, column, delta) - - table.create(bind=self.connection) - self.append(insertion_string % {'table_name': table_name}) - self.execute() - self.append('DROP TABLE migration_tmp') - self.execute() - - -def _visit_migrate_unique_constraint(self, *p, **k): - """Drop the given unique constraint - - The corresponding original method of sqlalchemy-migrate just - raises NotImplemented error - - """ - - self.recreate_table(p[0].table, omit_uniques=[p[0].name]) - - -def patch_migrate(): - """A workaround for SQLite's inability to alter things - - SQLite abilities to alter tables are very limited (please read - http://www.sqlite.org/lang_altertable.html for more details). - E. g. one can't drop a column or a constraint in SQLite. The - workaround for this is to recreate the original table omitting - the corresponding constraint (or column). - - sqlalchemy-migrate library has recreate_table() method that - implements this workaround, but it does it wrong: - - - information about unique constraints of a table - is not retrieved. So if you have a table with one - unique constraint and a migration adding another one - you will end up with a table that has only the - latter unique constraint, and the former will be lost - - - dropping of unique constraints is not supported at all - - The proper way to fix this is to provide a pull-request to - sqlalchemy-migrate, but the project seems to be dead. 
So we - can go on with monkey-patching of the lib at least for now. - - """ - - # this patch is needed to ensure that recreate_table() doesn't drop - # existing unique constraints of the table when creating a new one - helper_cls = sqlite.SQLiteHelper - helper_cls.recreate_table = _recreate_table - helper_cls._get_unique_constraints = _get_unique_constraints - - # this patch is needed to be able to drop existing unique constraints - constraint_cls = sqlite.SQLiteConstraintDropper - constraint_cls.visit_migrate_unique_constraint = \ - _visit_migrate_unique_constraint - constraint_cls.__bases__ = (ansisql.ANSIColumnDropper, - sqlite.SQLiteConstraintGenerator) - - -def db_sync(abs_path, version=None, init_version=0): - """Upgrade or downgrade a database. - - Function runs the upgrade() or downgrade() functions in change scripts. - - :param abs_path: Absolute path to migrate repository. - :param version: Database will upgrade/downgrade until this version. - If None - database will update to the latest - available version. - :param init_version: Initial database version - """ - if version is not None: - try: - version = int(version) - except ValueError: - raise exception.DbMigrationError( - message=_("version should be an integer")) - - current_version = db_version(abs_path, init_version) - repository = _find_migrate_repo(abs_path) - _db_schema_sanity_check() - if version is None or version > current_version: - return versioning_api.upgrade(get_engine(), repository, version) - else: - return versioning_api.downgrade(get_engine(), repository, - version) - - -def _db_schema_sanity_check(): - engine = get_engine() - if engine.name == 'mysql': - onlyutf8_sql = ('SELECT TABLE_NAME,TABLE_COLLATION ' - 'from information_schema.TABLES ' - 'where TABLE_SCHEMA=%s and ' - 'TABLE_COLLATION NOT LIKE "%%utf8%%"') - - table_names = [res[0] for res in engine.execute(onlyutf8_sql, - engine.url.database)] - if len(table_names) > 0: - raise ValueError(_('Tables "%s" have non utf8 collation, ' - 'please make sure all tables are CHARSET=utf8' - ) % ','.join(table_names)) - - -def db_version(abs_path, init_version): - """Show the current version of the repository. - - :param abs_path: Absolute path to migrate repository - :param version: Initial database version - """ - repository = _find_migrate_repo(abs_path) - try: - return versioning_api.db_version(get_engine(), repository) - except versioning_exceptions.DatabaseNotControlledError: - meta = sqlalchemy.MetaData() - engine = get_engine() - meta.reflect(bind=engine) - tables = meta.tables - if len(tables) == 0 or 'alembic_version' in tables: - db_version_control(abs_path, init_version) - return versioning_api.db_version(get_engine(), repository) - else: - raise exception.DbMigrationError( - message=_( - "The database is not under version control, but has " - "tables. Please stamp the current version of the schema " - "manually.")) - - -def db_version_control(abs_path, version=None): - """Mark a database as under this repository's version control. - - Once a database is under version control, schema changes should - only be done via change scripts in this repository. 
- - :param abs_path: Absolute path to migrate repository - :param version: Initial database version - """ - repository = _find_migrate_repo(abs_path) - versioning_api.version_control(get_engine(), repository, version) - return version - - -def _find_migrate_repo(abs_path): - """Get the project's change script repository - - :param abs_path: Absolute path to migrate repository - """ - if not os.path.exists(abs_path): - raise exception.DbMigrationError("Path %s not found" % abs_path) - return Repository(abs_path) diff --git a/gceapi/openstack/common/db/sqlalchemy/models.py b/gceapi/openstack/common/db/sqlalchemy/models.py deleted file mode 100644 index 1b6e5c2..0000000 --- a/gceapi/openstack/common/db/sqlalchemy/models.py +++ /dev/null @@ -1,117 +0,0 @@ -# Copyright (c) 2011 X.commerce, a business unit of eBay Inc. -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Piston Cloud Computing, Inc. -# Copyright 2012 Cloudscaling Group, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -SQLAlchemy models. -""" - -import six - -from sqlalchemy import Column, Integer -from sqlalchemy import DateTime -from sqlalchemy.orm import object_mapper - -from gceapi.openstack.common.db.sqlalchemy import session as sa -from gceapi.openstack.common import timeutils - - -class ModelBase(object): - """Base class for models.""" - __table_initialized__ = False - - def save(self, session=None): - """Save this object.""" - if not session: - session = sa.get_session() - # NOTE(boris-42): This part of code should be look like: - # session.add(self) - # session.flush() - # But there is a bug in sqlalchemy and eventlet that - # raises NoneType exception if there is no running - # transaction and rollback is called. As long as - # sqlalchemy has this bug we have to create transaction - # explicitly. - with session.begin(subtransactions=True): - session.add(self) - session.flush() - - def __setitem__(self, key, value): - setattr(self, key, value) - - def __getitem__(self, key): - return getattr(self, key) - - def get(self, key, default=None): - return getattr(self, key, default) - - @property - def _extra_keys(self): - """Specifies custom fields - - Subclasses can override this property to return a list - of custom fields that should be included in their dict - representation. - - For reference check tests/db/sqlalchemy/test_models.py - """ - return [] - - def __iter__(self): - columns = dict(object_mapper(self).columns).keys() - # NOTE(russellb): Allow models to specify other keys that can be looked - # up, beyond the actual db columns. An example would be the 'name' - # property for an Instance. 
- columns.extend(self._extra_keys) - self._i = iter(columns) - return self - - def next(self): - n = six.advance_iterator(self._i) - return n, getattr(self, n) - - def update(self, values): - """Make the model object behave like a dict.""" - for k, v in six.iteritems(values): - setattr(self, k, v) - - def iteritems(self): - """Make the model object behave like a dict. - - Includes attributes from joins. - """ - local = dict(self) - joined = dict([(k, v) for k, v in six.iteritems(self.__dict__) - if not k[0] == '_']) - local.update(joined) - return six.iteritems(local) - - -class TimestampMixin(object): - created_at = Column(DateTime, default=lambda: timeutils.utcnow()) - updated_at = Column(DateTime, onupdate=lambda: timeutils.utcnow()) - - -class SoftDeleteMixin(object): - deleted_at = Column(DateTime) - deleted = Column(Integer, default=0) - - def soft_delete(self, session=None): - """Mark this object as deleted.""" - self.deleted = self.id - self.deleted_at = timeutils.utcnow() - self.save(session=session) diff --git a/gceapi/openstack/common/db/sqlalchemy/provision.py b/gceapi/openstack/common/db/sqlalchemy/provision.py deleted file mode 100644 index 42c3c94..0000000 --- a/gceapi/openstack/common/db/sqlalchemy/provision.py +++ /dev/null @@ -1,187 +0,0 @@ -# Copyright 2013 Mirantis.inc -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Provision test environment for specific DB backends""" - -import argparse -import os -import random -import string - -from six import moves -import sqlalchemy - -from gceapi.openstack.common.db import exception as exc - - -SQL_CONNECTION = os.getenv('OS_TEST_DBAPI_ADMIN_CONNECTION', 'sqlite://') - - -def _gen_credentials(*names): - """Generate credentials.""" - auth_dict = {} - for name in names: - val = ''.join(random.choice(string.ascii_lowercase) - for i in moves.range(10)) - auth_dict[name] = val - return auth_dict - - -def _get_engine(uri=SQL_CONNECTION): - """Engine creation - - By default the uri is SQL_CONNECTION which is admin credentials. - Call the function without arguments to get admin connection. Admin - connection required to create temporary user and database for each - particular test. Otherwise use existing connection to recreate connection - to the temporary database. 
- """ - return sqlalchemy.create_engine(uri, poolclass=sqlalchemy.pool.NullPool) - - -def _execute_sql(engine, sql, driver): - """Initialize connection, execute sql query and close it.""" - try: - with engine.connect() as conn: - if driver == 'postgresql': - conn.connection.set_isolation_level(0) - for s in sql: - conn.execute(s) - except sqlalchemy.exc.OperationalError: - msg = ('%s does not match database admin ' - 'credentials or database does not exist.') - raise exc.DBConnectionError(msg % SQL_CONNECTION) - - -def create_database(engine): - """Provide temporary user and database for each particular test.""" - driver = engine.name - - auth = _gen_credentials('database', 'user', 'passwd') - - sqls = { - 'mysql': [ - "drop database if exists %(database)s;", - "grant all on %(database)s.* to '%(user)s'@'localhost'" - " identified by '%(passwd)s';", - "create database %(database)s;", - ], - 'postgresql': [ - "drop database if exists %(database)s;", - "drop user if exists %(user)s;", - "create user %(user)s with password '%(passwd)s';", - "create database %(database)s owner %(user)s;", - ] - } - - if driver == 'sqlite': - return 'sqlite:////tmp/%s' % auth['database'] - - try: - sql_rows = sqls[driver] - except KeyError: - raise ValueError('Unsupported RDBMS %s' % driver) - sql_query = map(lambda x: x % auth, sql_rows) - - _execute_sql(engine, sql_query, driver) - - params = auth.copy() - params['backend'] = driver - return "%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s" % params - - -def drop_database(engine, current_uri): - """Drop temporary database and user after each particular test.""" - engine = _get_engine(current_uri) - admin_engine = _get_engine() - driver = engine.name - auth = {'database': engine.url.database, 'user': engine.url.username} - - if driver == 'sqlite': - try: - os.remove(auth['database']) - except OSError: - pass - return - - sqls = { - 'mysql': [ - "drop database if exists %(database)s;", - "drop user '%(user)s'@'localhost';", - ], - 'postgresql': [ - "drop database if exists %(database)s;", - "drop user if exists %(user)s;", - ] - } - - try: - sql_rows = sqls[driver] - except KeyError: - raise ValueError('Unsupported RDBMS %s' % driver) - sql_query = map(lambda x: x % auth, sql_rows) - - _execute_sql(admin_engine, sql_query, driver) - - -def main(): - """Controller to handle commands - - ::create: Create test user and database with random names. - ::drop: Drop user and database created by previous command. - """ - parser = argparse.ArgumentParser( - description='Controller to handle database creation and dropping' - ' commands.', - epilog='Under normal circumstances is not used directly.' 
- ' Used in .testr.conf to automate test database creation' - ' and dropping processes.') - subparsers = parser.add_subparsers( - help='Subcommands to manipulate temporary test databases.') - - create = subparsers.add_parser( - 'create', - help='Create temporary test ' - 'databases and users.') - create.set_defaults(which='create') - create.add_argument( - 'instances_count', - type=int, - help='Number of databases to create.') - - drop = subparsers.add_parser( - 'drop', - help='Drop temporary test databases and users.') - drop.set_defaults(which='drop') - drop.add_argument( - 'instances', - nargs='+', - help='List of databases uri to be dropped.') - - args = parser.parse_args() - - engine = _get_engine() - which = args.which - - if which == "create": - for i in range(int(args.instances_count)): - print(create_database(engine)) - elif which == "drop": - for db in args.instances: - drop_database(engine, db) - - -if __name__ == "__main__": - main() diff --git a/gceapi/openstack/common/db/sqlalchemy/session.py b/gceapi/openstack/common/db/sqlalchemy/session.py deleted file mode 100644 index 71105f6..0000000 --- a/gceapi/openstack/common/db/sqlalchemy/session.py +++ /dev/null @@ -1,867 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Session Handling for SQLAlchemy backend. - -Initializing: - -* Call set_defaults with the minimal of the following kwargs: - sql_connection, sqlite_db - - Example:: - - session.set_defaults( - sql_connection="sqlite:///var/lib/gceapi/sqlite.db", - sqlite_db="/var/lib/gceapi/sqlite.db") - -Recommended ways to use sessions within this framework: - -* Don't use them explicitly; this is like running with AUTOCOMMIT=1. - model_query() will implicitly use a session when called without one - supplied. This is the ideal situation because it will allow queries - to be automatically retried if the database connection is interrupted. - - Note: Automatic retry will be enabled in a future patch. - - It is generally fine to issue several queries in a row like this. Even though - they may be run in separate transactions and/or separate sessions, each one - will see the data from the prior calls. If needed, undo- or rollback-like - functionality should be handled at a logical level. For an example, look at - the code around quotas and reservation_rollback(). - - Examples:: - - def get_foo(context, foo): - return (model_query(context, models.Foo). - filter_by(foo=foo). - first()) - - def update_foo(context, id, newfoo): - (model_query(context, models.Foo). - filter_by(id=id). - update({'foo': newfoo})) - - def create_foo(context, values): - foo_ref = models.Foo() - foo_ref.update(values) - foo_ref.save() - return foo_ref - - -* Within the scope of a single method, keeping all the reads and writes within - the context managed by a single session. 
In this way, the session's __exit__ - handler will take care of calling flush() and commit() for you. - If using this approach, you should not explicitly call flush() or commit(). - Any error within the context of the session will cause the session to emit - a ROLLBACK. Database Errors like IntegrityError will be raised in - session's __exit__ handler, and any try/except within the context managed - by session will not be triggered. And catching other non-database errors in - the session will not trigger the ROLLBACK, so exception handlers should - always be outside the session, unless the developer wants to do a partial - commit on purpose. If the connection is dropped before this is possible, - the database will implicitly roll back the transaction. - - Note: statements in the session scope will not be automatically retried. - - If you create models within the session, they need to be added, but you - do not need to call model.save() - - :: - - def create_many_foo(context, foos): - session = get_session() - with session.begin(): - for foo in foos: - foo_ref = models.Foo() - foo_ref.update(foo) - session.add(foo_ref) - - def update_bar(context, foo_id, newbar): - session = get_session() - with session.begin(): - foo_ref = (model_query(context, models.Foo, session). - filter_by(id=foo_id). - first()) - (model_query(context, models.Bar, session). - filter_by(id=foo_ref['bar_id']). - update({'bar': newbar})) - - Note: update_bar is a trivially simple example of using "with session.begin". - Whereas create_many_foo is a good example of when a transaction is needed, - it is always best to use as few queries as possible. The two queries in - update_bar can be better expressed using a single query which avoids - the need for an explicit transaction. It can be expressed like so:: - - def update_bar(context, foo_id, newbar): - subq = (model_query(context, models.Foo.id). - filter_by(id=foo_id). - limit(1). - subquery()) - (model_query(context, models.Bar). - filter_by(id=subq.as_scalar()). - update({'bar': newbar})) - - For reference, this emits approximately the following SQL statement:: - - UPDATE bar SET bar = ${newbar} - WHERE id=(SELECT bar_id FROM foo WHERE id = ${foo_id} LIMIT 1); - - Note: create_duplicate_foo is a trivially simple example of catching an - exception while using "with session.begin". Here create two duplicate - instances with same primary key, must catch the exception out of context - managed by a single session: - - def create_duplicate_foo(context): - foo1 = models.Foo() - foo2 = models.Foo() - foo1.id = foo2.id = 1 - session = get_session() - try: - with session.begin(): - session.add(foo1) - session.add(foo2) - except exception.DBDuplicateEntry as e: - handle_error(e) - -* Passing an active session between methods. Sessions should only be passed - to private methods. The private method must use a subtransaction; otherwise - SQLAlchemy will throw an error when you call session.begin() on an existing - transaction. Public methods should not accept a session parameter and should - not be involved in sessions within the caller's scope. - - Note that this incurs more overhead in SQLAlchemy than the above means - due to nesting transactions, and it is not possible to implicitly retry - failed database operations when using this approach. - - This also makes code somewhat more difficult to read and debug, because a - single database transaction spans more than one method. Error handling - becomes less clear in this situation. 
When this is needed for code clarity, - it should be clearly documented. - - :: - - def myfunc(foo): - session = get_session() - with session.begin(): - # do some database things - bar = _private_func(foo, session) - return bar - - def _private_func(foo, session=None): - if not session: - session = get_session() - with session.begin(subtransaction=True): - # do some other database things - return bar - - -There are some things which it is best to avoid: - -* Don't keep a transaction open any longer than necessary. - - This means that your "with session.begin()" block should be as short - as possible, while still containing all the related calls for that - transaction. - -* Avoid "with_lockmode('UPDATE')" when possible. - - In MySQL/InnoDB, when a "SELECT ... FOR UPDATE" query does not match - any rows, it will take a gap-lock. This is a form of write-lock on the - "gap" where no rows exist, and prevents any other writes to that space. - This can effectively prevent any INSERT into a table by locking the gap - at the end of the index. Similar problems will occur if the SELECT FOR UPDATE - has an overly broad WHERE clause, or doesn't properly use an index. - - One idea proposed at ODS Fall '12 was to use a normal SELECT to test the - number of rows matching a query, and if only one row is returned, - then issue the SELECT FOR UPDATE. - - The better long-term solution is to use INSERT .. ON DUPLICATE KEY UPDATE. - However, this can not be done until the "deleted" columns are removed and - proper UNIQUE constraints are added to the tables. - - -Enabling soft deletes: - -* To use/enable soft-deletes, the SoftDeleteMixin must be added - to your model class. For example:: - - class NovaBase(models.SoftDeleteMixin, models.ModelBase): - pass - - -Efficient use of soft deletes: - -* There are two possible ways to mark a record as deleted:: - - model.soft_delete() and query.soft_delete(). - - model.soft_delete() method works with single already fetched entry. - query.soft_delete() makes only one db request for all entries that correspond - to query. - -* In almost all cases you should use query.soft_delete(). Some examples:: - - def soft_delete_bar(): - count = model_query(BarModel).find(some_condition).soft_delete() - if count == 0: - raise Exception("0 entries were soft deleted") - - def complex_soft_delete_with_synchronization_bar(session=None): - if session is None: - session = get_session() - with session.begin(subtransactions=True): - count = (model_query(BarModel). - find(some_condition). - soft_delete(synchronize_session=True)) - # Here synchronize_session is required, because we - # don't know what is going on in outer session. - if count == 0: - raise Exception("0 entries were soft deleted") - -* There is only one situation where model.soft_delete() is appropriate: when - you fetch a single record, work with it, and mark it as deleted in the same - transaction. - - :: - - def soft_delete_bar_model(): - session = get_session() - with session.begin(): - bar_ref = model_query(BarModel).find(some_condition).first() - # Work with bar_ref - bar_ref.soft_delete(session=session) - - However, if you need to work with all entries that correspond to query and - then soft delete them you should use query.soft_delete() method:: - - def soft_delete_multi_models(): - session = get_session() - with session.begin(): - query = (model_query(BarModel, session=session). 
- find(some_condition)) - model_refs = query.all() - # Work with model_refs - query.soft_delete(synchronize_session=False) - # synchronize_session=False should be set if there is no outer - # session and these entries are not used after this. - - When working with many rows, it is very important to use query.soft_delete, - which issues a single query. Using model.soft_delete(), as in the following - example, is very inefficient. - - :: - - for bar_ref in bar_refs: - bar_ref.soft_delete(session=session) - # This will produce count(bar_refs) db requests. -""" - -import functools -import os.path -import re -import time - -from oslo.config import cfg -import six -from sqlalchemy import exc as sqla_exc -from sqlalchemy.interfaces import PoolListener -import sqlalchemy.orm -from sqlalchemy.pool import NullPool, StaticPool -from sqlalchemy.sql.expression import literal_column - -from gceapi.openstack.common.db import exception -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import log as logging -from gceapi.openstack.common import timeutils - -sqlite_db_opts = [ - cfg.StrOpt('sqlite_db', - default='gceapi.sqlite', - help='The file name to use with SQLite'), - cfg.BoolOpt('sqlite_synchronous', - default=True, - help='If True, SQLite uses synchronous mode'), -] - -database_opts = [ - cfg.StrOpt('connection', - default='sqlite:///' + - os.path.abspath(os.path.join(os.path.dirname(__file__), - '../', '$sqlite_db')), - help='The SQLAlchemy connection string used to connect to the ' - 'database', - secret=True, - deprecated_opts=[cfg.DeprecatedOpt('sql_connection', - group='DEFAULT'), - cfg.DeprecatedOpt('sql_connection', - group='DATABASE'), - cfg.DeprecatedOpt('connection', - group='sql'), ]), - cfg.StrOpt('slave_connection', - default='', - secret=True, - help='The SQLAlchemy connection string used to connect to the ' - 'slave database'), - cfg.IntOpt('idle_timeout', - default=3600, - deprecated_opts=[cfg.DeprecatedOpt('sql_idle_timeout', - group='DEFAULT'), - cfg.DeprecatedOpt('sql_idle_timeout', - group='DATABASE'), - cfg.DeprecatedOpt('idle_timeout', - group='sql')], - help='Timeout before idle sql connections are reaped'), - cfg.IntOpt('min_pool_size', - default=1, - deprecated_opts=[cfg.DeprecatedOpt('sql_min_pool_size', - group='DEFAULT'), - cfg.DeprecatedOpt('sql_min_pool_size', - group='DATABASE')], - help='Minimum number of SQL connections to keep open in a ' - 'pool'), - cfg.IntOpt('max_pool_size', - default=None, - deprecated_opts=[cfg.DeprecatedOpt('sql_max_pool_size', - group='DEFAULT'), - cfg.DeprecatedOpt('sql_max_pool_size', - group='DATABASE')], - help='Maximum number of SQL connections to keep open in a ' - 'pool'), - cfg.IntOpt('max_retries', - default=10, - deprecated_opts=[cfg.DeprecatedOpt('sql_max_retries', - group='DEFAULT'), - cfg.DeprecatedOpt('sql_max_retries', - group='DATABASE')], - help='Maximum db connection retries during startup. 
' - '(setting -1 implies an infinite retry count)'), - cfg.IntOpt('retry_interval', - default=10, - deprecated_opts=[cfg.DeprecatedOpt('sql_retry_interval', - group='DEFAULT'), - cfg.DeprecatedOpt('reconnect_interval', - group='DATABASE')], - help='Interval between retries of opening a sql connection'), - cfg.IntOpt('max_overflow', - default=None, - deprecated_opts=[cfg.DeprecatedOpt('sql_max_overflow', - group='DEFAULT'), - cfg.DeprecatedOpt('sqlalchemy_max_overflow', - group='DATABASE')], - help='If set, use this value for max_overflow with sqlalchemy'), - cfg.IntOpt('connection_debug', - default=0, - deprecated_opts=[cfg.DeprecatedOpt('sql_connection_debug', - group='DEFAULT')], - help='Verbosity of SQL debugging information. 0=None, ' - '100=Everything'), - cfg.BoolOpt('connection_trace', - default=False, - deprecated_opts=[cfg.DeprecatedOpt('sql_connection_trace', - group='DEFAULT')], - help='Add python stack traces to SQL as comment strings'), - cfg.IntOpt('pool_timeout', - default=None, - deprecated_opts=[cfg.DeprecatedOpt('sqlalchemy_pool_timeout', - group='DATABASE')], - help='If set, use this value for pool_timeout with sqlalchemy'), -] - -CONF = cfg.CONF -CONF.register_opts(sqlite_db_opts) -CONF.register_opts(database_opts, 'database') - -LOG = logging.getLogger(__name__) - -_ENGINE = None -_MAKER = None -_SLAVE_ENGINE = None -_SLAVE_MAKER = None - - -def set_defaults(sql_connection, sqlite_db, max_pool_size=None, - max_overflow=None, pool_timeout=None): - """Set defaults for configuration variables.""" - cfg.set_defaults(database_opts, - connection=sql_connection) - cfg.set_defaults(sqlite_db_opts, - sqlite_db=sqlite_db) - # Update the QueuePool defaults - if max_pool_size is not None: - cfg.set_defaults(database_opts, - max_pool_size=max_pool_size) - if max_overflow is not None: - cfg.set_defaults(database_opts, - max_overflow=max_overflow) - if pool_timeout is not None: - cfg.set_defaults(database_opts, - pool_timeout=pool_timeout) - - -def cleanup(): - global _ENGINE, _MAKER - global _SLAVE_ENGINE, _SLAVE_MAKER - - if _MAKER: - _MAKER.close_all() - _MAKER = None - if _ENGINE: - _ENGINE.dispose() - _ENGINE = None - if _SLAVE_MAKER: - _SLAVE_MAKER.close_all() - _SLAVE_MAKER = None - if _SLAVE_ENGINE: - _SLAVE_ENGINE.dispose() - _SLAVE_ENGINE = None - - -class SqliteForeignKeysListener(PoolListener): - """Ensures that the foreign key constraints are enforced in SQLite. 
- - The foreign key constraints are disabled by default in SQLite, - so the foreign key constraints will be enabled here for every - database connection - """ - def connect(self, dbapi_con, con_record): - dbapi_con.execute('pragma foreign_keys=ON') - - -def get_session(autocommit=True, expire_on_commit=False, sqlite_fk=False, - slave_session=False, mysql_traditional_mode=False): - """Return a SQLAlchemy session.""" - global _MAKER - global _SLAVE_MAKER - maker = _MAKER - - if slave_session: - maker = _SLAVE_MAKER - - if maker is None: - engine = get_engine(sqlite_fk=sqlite_fk, slave_engine=slave_session, - mysql_traditional_mode=mysql_traditional_mode) - maker = get_maker(engine, autocommit, expire_on_commit) - - if slave_session: - _SLAVE_MAKER = maker - else: - _MAKER = maker - - session = maker() - return session - - -# note(boris-42): In current versions of DB backends unique constraint -# violation messages follow the structure: -# -# sqlite: -# 1 column - (IntegrityError) column c1 is not unique -# N columns - (IntegrityError) column c1, c2, ..., N are not unique -# -# sqlite since 3.7.16: -# 1 column - (IntegrityError) UNIQUE constraint failed: k1 -# -# N columns - (IntegrityError) UNIQUE constraint failed: k1, k2 -# -# postgres: -# 1 column - (IntegrityError) duplicate key value violates unique -# constraint "users_c1_key" -# N columns - (IntegrityError) duplicate key value violates unique -# constraint "name_of_our_constraint" -# -# mysql: -# 1 column - (IntegrityError) (1062, "Duplicate entry 'value_of_c1' for key -# 'c1'") -# N columns - (IntegrityError) (1062, "Duplicate entry 'values joined -# with -' for key 'name_of_our_constraint'") -_DUP_KEY_RE_DB = { - "sqlite": (re.compile(r"^.*columns?([^)]+)(is|are)\s+not\s+unique$"), - re.compile(r"^.*UNIQUE\s+constraint\s+failed:\s+(.+)$")), - "postgresql": (re.compile(r"^.*duplicate\s+key.*\"([^\"]+)\"\s*\n.*$"),), - "mysql": (re.compile(r"^.*\(1062,.*'([^\']+)'\"\)$"),) -} - - -def _raise_if_duplicate_entry_error(integrity_error, engine_name): - """Raise exception if two entries are duplicated. - - In this function will be raised DBDuplicateEntry exception if integrity - error wrap unique constraint violation. - """ - - def get_columns_from_uniq_cons_or_name(columns): - # note(vsergeyev): UniqueConstraint name convention: "uniq_t0c10c2" - # where `t` it is table name and columns `c1`, `c2` - # are in UniqueConstraint. - uniqbase = "uniq_" - if not columns.startswith(uniqbase): - if engine_name == "postgresql": - return [columns[columns.index("_") + 1:columns.rindex("_")]] - return [columns] - return columns[len(uniqbase):].split("0")[1:] - - if engine_name not in ["mysql", "sqlite", "postgresql"]: - return - - # FIXME(johannes): The usage of the .message attribute has been - # deprecated since Python 2.6. However, the exceptions raised by - # SQLAlchemy can differ when using unicode() and accessing .message. - # An audit across all three supported engines will be necessary to - # ensure there are no regressions. 
- for pattern in _DUP_KEY_RE_DB[engine_name]: - match = pattern.match(integrity_error.message) - if match: - break - else: - return - - columns = match.group(1) - - if engine_name == "sqlite": - columns = columns.strip().split(", ") - else: - columns = get_columns_from_uniq_cons_or_name(columns) - raise exception.DBDuplicateEntry(columns, integrity_error) - - -# NOTE(comstud): In current versions of DB backends, Deadlock violation -# messages follow the structure: -# -# mysql: -# (OperationalError) (1213, 'Deadlock found when trying to get lock; try ' -# 'restarting transaction') -_DEADLOCK_RE_DB = { - "mysql": re.compile(r"^.*\(1213, 'Deadlock.*") -} - - -def _raise_if_deadlock_error(operational_error, engine_name): - """Raise exception on deadlock condition. - - Raise DBDeadlock exception if OperationalError contains a Deadlock - condition. - """ - re = _DEADLOCK_RE_DB.get(engine_name) - if re is None: - return - # FIXME(johannes): The usage of the .message attribute has been - # deprecated since Python 2.6. However, the exceptions raised by - # SQLAlchemy can differ when using unicode() and accessing .message. - # An audit across all three supported engines will be necessary to - # ensure there are no regressions. - m = re.match(operational_error.message) - if not m: - return - raise exception.DBDeadlock(operational_error) - - -def _wrap_db_error(f): - @functools.wraps(f) - def _wrap(*args, **kwargs): - try: - return f(*args, **kwargs) - except UnicodeEncodeError: - raise exception.DBInvalidUnicodeParameter() - # note(boris-42): We should catch unique constraint violation and - # wrap it by our own DBDuplicateEntry exception. Unique constraint - # violation is wrapped by IntegrityError. - except sqla_exc.OperationalError as e: - _raise_if_deadlock_error(e, get_engine().name) - # NOTE(comstud): A lot of code is checking for OperationalError - # so let's not wrap it for now. - raise - except sqla_exc.IntegrityError as e: - # note(boris-42): SqlAlchemy doesn't unify errors from different - # DBs so we must do this. Also in some tables (for example - # instance_types) there are more than one unique constraint. This - # means we should get names of columns, which values violate - # unique constraint, from error message. - _raise_if_duplicate_entry_error(e, get_engine().name) - raise exception.DBError(e) - except Exception as e: - LOG.exception(_('DB exception wrapped.')) - raise exception.DBError(e) - return _wrap - - -def get_engine(sqlite_fk=False, slave_engine=False, - mysql_traditional_mode=False): - """Return a SQLAlchemy engine.""" - global _ENGINE - global _SLAVE_ENGINE - engine = _ENGINE - db_uri = CONF.database.connection - - if slave_engine: - engine = _SLAVE_ENGINE - db_uri = CONF.database.slave_connection - - if engine is None: - engine = create_engine(db_uri, sqlite_fk=sqlite_fk, - mysql_traditional_mode=mysql_traditional_mode) - if slave_engine: - _SLAVE_ENGINE = engine - else: - _ENGINE = engine - - return engine - - -def _synchronous_switch_listener(dbapi_conn, connection_rec): - """Switch sqlite connections to non-synchronous mode.""" - dbapi_conn.execute("PRAGMA synchronous = OFF") - - -def _add_regexp_listener(dbapi_con, con_record): - """Add REGEXP function to sqlite connections.""" - - def regexp(expr, item): - reg = re.compile(expr) - return reg.search(six.text_type(item)) is not None - dbapi_con.create_function('regexp', 2, regexp) - - -def _thread_yield(dbapi_con, con_record): - """Ensure other greenthreads get a chance to be executed. 
- - If we use eventlet.monkey_patch(), eventlet.greenthread.sleep(0) will - execute instead of time.sleep(0). - Force a context switch. With common database backends (eg MySQLdb and - sqlite), there is no implicit yield caused by network I/O since they are - implemented by C libraries that eventlet cannot monkey patch. - """ - time.sleep(0) - - -def _ping_listener(engine, dbapi_conn, connection_rec, connection_proxy): - """Ensures that MySQL and DB2 connections are alive. - - Borrowed from: - http://groups.google.com/group/sqlalchemy/msg/a4ce563d802c929f - """ - cursor = dbapi_conn.cursor() - try: - ping_sql = 'select 1' - if engine.name == 'ibm_db_sa': - # DB2 requires a table expression - ping_sql = 'select 1 from (values (1)) AS t1' - cursor.execute(ping_sql) - except Exception as ex: - if engine.dialect.is_disconnect(ex, dbapi_conn, cursor): - msg = _('Database server has gone away: %s') % ex - LOG.warning(msg) - raise sqla_exc.DisconnectionError(msg) - else: - raise - - -def _set_mode_traditional(dbapi_con, connection_rec, connection_proxy): - """Set engine mode to 'traditional'. - - Required to prevent silent truncates at insert or update operations - under MySQL. By default MySQL truncates inserted string if it longer - than a declared field just with warning. That is fraught with data - corruption. - """ - dbapi_con.cursor().execute("SET SESSION sql_mode = TRADITIONAL;") - - -def _is_db_connection_error(args): - """Return True if error in connecting to db.""" - # NOTE(adam_g): This is currently MySQL specific and needs to be extended - # to support Postgres and others. - # For the db2, the error code is -30081 since the db2 is still not ready - conn_err_codes = ('2002', '2003', '2006', '2013', '-30081') - for err_code in conn_err_codes: - if args.find(err_code) != -1: - return True - return False - - -def create_engine(sql_connection, sqlite_fk=False, - mysql_traditional_mode=False): - """Return a new SQLAlchemy engine.""" - # NOTE(geekinutah): At this point we could be connecting to the normal - # db handle or the slave db handle. Things like - # _wrap_db_error aren't going to work well if their - # backends don't match. Let's check. 
- _assert_matching_drivers() - connection_dict = sqlalchemy.engine.url.make_url(sql_connection) - - engine_args = { - "pool_recycle": CONF.database.idle_timeout, - "echo": False, - 'convert_unicode': True, - } - - # Map our SQL debug level to SQLAlchemy's options - if CONF.database.connection_debug >= 100: - engine_args['echo'] = 'debug' - elif CONF.database.connection_debug >= 50: - engine_args['echo'] = True - - if "sqlite" in connection_dict.drivername: - if sqlite_fk: - engine_args["listeners"] = [SqliteForeignKeysListener()] - engine_args["poolclass"] = NullPool - - if CONF.database.connection == "sqlite://": - engine_args["poolclass"] = StaticPool - engine_args["connect_args"] = {'check_same_thread': False} - else: - if CONF.database.max_pool_size is not None: - engine_args['pool_size'] = CONF.database.max_pool_size - if CONF.database.max_overflow is not None: - engine_args['max_overflow'] = CONF.database.max_overflow - if CONF.database.pool_timeout is not None: - engine_args['pool_timeout'] = CONF.database.pool_timeout - - engine = sqlalchemy.create_engine(sql_connection, **engine_args) - - sqlalchemy.event.listen(engine, 'checkin', _thread_yield) - - if engine.name in ['mysql', 'ibm_db_sa']: - callback = functools.partial(_ping_listener, engine) - sqlalchemy.event.listen(engine, 'checkout', callback) - if mysql_traditional_mode: - sqlalchemy.event.listen(engine, 'checkout', _set_mode_traditional) - else: - LOG.warning(_("This application has not enabled MySQL traditional" - " mode, which means silent data corruption may" - " occur. Please encourage the application" - " developers to enable this mode.")) - elif 'sqlite' in connection_dict.drivername: - if not CONF.sqlite_synchronous: - sqlalchemy.event.listen(engine, 'connect', - _synchronous_switch_listener) - sqlalchemy.event.listen(engine, 'connect', _add_regexp_listener) - - if (CONF.database.connection_trace and - engine.dialect.dbapi.__name__ == 'MySQLdb'): - _patch_mysqldb_with_stacktrace_comments() - - try: - engine.connect() - except sqla_exc.OperationalError as e: - if not _is_db_connection_error(e.args[0]): - raise - - remaining = CONF.database.max_retries - if remaining == -1: - remaining = 'infinite' - while True: - msg = _('SQL connection failed. 
%s attempts left.') - LOG.warning(msg % remaining) - if remaining != 'infinite': - remaining -= 1 - time.sleep(CONF.database.retry_interval) - try: - engine.connect() - break - except sqla_exc.OperationalError as e: - if (remaining != 'infinite' and remaining == 0) or \ - not _is_db_connection_error(e.args[0]): - raise - return engine - - -class Query(sqlalchemy.orm.query.Query): - """Subclass of sqlalchemy.query with soft_delete() method.""" - def soft_delete(self, synchronize_session='evaluate'): - return self.update({'deleted': literal_column('id'), - 'updated_at': literal_column('updated_at'), - 'deleted_at': timeutils.utcnow()}, - synchronize_session=synchronize_session) - - -class Session(sqlalchemy.orm.session.Session): - """Custom Session class to avoid SqlAlchemy Session monkey patching.""" - @_wrap_db_error - def query(self, *args, **kwargs): - return super(Session, self).query(*args, **kwargs) - - @_wrap_db_error - def flush(self, *args, **kwargs): - return super(Session, self).flush(*args, **kwargs) - - @_wrap_db_error - def execute(self, *args, **kwargs): - return super(Session, self).execute(*args, **kwargs) - - -def get_maker(engine, autocommit=True, expire_on_commit=False): - """Return a SQLAlchemy sessionmaker using the given engine.""" - return sqlalchemy.orm.sessionmaker(bind=engine, - class_=Session, - autocommit=autocommit, - expire_on_commit=expire_on_commit, - query_cls=Query) - - -def _patch_mysqldb_with_stacktrace_comments(): - """Adds current stack trace as a comment in queries. - - Patches MySQLdb.cursors.BaseCursor._do_query. - """ - import MySQLdb.cursors - import traceback - - old_mysql_do_query = MySQLdb.cursors.BaseCursor._do_query - - def _do_query(self, q): - stack = '' - for filename, line, method, function in traceback.extract_stack(): - # exclude various common things from trace - if filename.endswith('session.py') and method == '_do_query': - continue - if filename.endswith('api.py') and method == 'wrapper': - continue - if filename.endswith('utils.py') and method == '_inner': - continue - if filename.endswith('exception.py') and method == '_wrap': - continue - # db/api is just a wrapper around db/sqlalchemy/api - if filename.endswith('db/api.py'): - continue - # only trace inside gceapi - index = filename.rfind('gceapi') - if index == -1: - continue - stack += "File:%s:%s Method:%s() Line:%s | " \ - % (filename[index:], line, method, function) - - # strip trailing " | " from stack - if stack: - stack = stack[:-3] - qq = "%s /* %s */" % (q, stack) - else: - qq = q - old_mysql_do_query(self, qq) - - setattr(MySQLdb.cursors.BaseCursor, '_do_query', _do_query) - - -def _assert_matching_drivers(): - """Make sure slave handle and normal handle have the same driver.""" - # NOTE(geekinutah): There's no use case for writing to one backend and - # reading from another. Who knows what the future holds? - if CONF.database.slave_connection == '': - return - - normal = sqlalchemy.engine.url.make_url(CONF.database.connection) - slave = sqlalchemy.engine.url.make_url(CONF.database.slave_connection) - assert normal.drivername == slave.drivername diff --git a/gceapi/openstack/common/db/sqlalchemy/test_migrations.py b/gceapi/openstack/common/db/sqlalchemy/test_migrations.py deleted file mode 100644 index 7162aa3..0000000 --- a/gceapi/openstack/common/db/sqlalchemy/test_migrations.py +++ /dev/null @@ -1,269 +0,0 @@ -# Copyright 2010-2011 OpenStack Foundation -# Copyright 2012-2013 IBM Corp. -# All Rights Reserved. 
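The soft_delete() method above implements the usual OpenStack soft-delete convention: rather than issuing a DELETE, a row is retired by copying its own id into the deleted column (live rows keep deleted == 0, so a unique constraint over (col, deleted) still allows value reuse). A minimal self-contained sketch of the same trick, assuming an illustrative model:

    import datetime

    import sqlalchemy
    from sqlalchemy import Column, DateTime, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker
    from sqlalchemy.sql.expression import literal_column

    Base = declarative_base()

    class Server(Base):
        __tablename__ = 'servers'
        id = Column(Integer, primary_key=True)
        name = Column(String(64))
        deleted = Column(Integer, default=0)  # holds its own id once deleted
        deleted_at = Column(DateTime)

    engine = sqlalchemy.create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    session.add(Server(name='gce-1'))
    session.commit()

    # Soft-delete: copy the row id into `deleted` instead of removing it.
    # synchronize_session=False because literal_column() cannot be
    # evaluated in memory by the default 'evaluate' strategy.
    session.query(Server).filter_by(name='gce-1').update(
        {'deleted': literal_column('id'),
         'deleted_at': datetime.datetime.utcnow()},
        synchronize_session=False)
    session.commit()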
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools -import os -import subprocess - -import lockfile -from six import moves -import sqlalchemy -import sqlalchemy.exc - -from gceapi.openstack.common.db.sqlalchemy import utils -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import log as logging -from gceapi.openstack.common.py3kcompat import urlutils -from gceapi.openstack.common import test - -LOG = logging.getLogger(__name__) - - -def _have_mysql(user, passwd, database): - present = os.environ.get('TEST_MYSQL_PRESENT') - if present is None: - return utils.is_backend_avail(backend='mysql', - user=user, - passwd=passwd, - database=database) - return present.lower() in ('', 'true') - - -def _have_postgresql(user, passwd, database): - present = os.environ.get('TEST_POSTGRESQL_PRESENT') - if present is None: - return utils.is_backend_avail(backend='postgres', - user=user, - passwd=passwd, - database=database) - return present.lower() in ('', 'true') - - -def _set_db_lock(lock_path=None, lock_prefix=None): - def decorator(f): - @functools.wraps(f) - def wrapper(*args, **kwargs): - try: - path = lock_path or os.environ.get("GCEAPI_LOCK_PATH") - lock = lockfile.FileLock(os.path.join(path, lock_prefix)) - with lock: - LOG.debug(_('Got lock "%s"') % f.__name__) - return f(*args, **kwargs) - finally: - LOG.debug(_('Lock released "%s"') % f.__name__) - return wrapper - return decorator - - -class BaseMigrationTestCase(test.BaseTestCase): - """Base class for testing of migration utils.""" - - def __init__(self, *args, **kwargs): - super(BaseMigrationTestCase, self).__init__(*args, **kwargs) - - self.DEFAULT_CONFIG_FILE = os.path.join(os.path.dirname(__file__), - 'test_migrations.conf') - # Test machines can set the TEST_MIGRATIONS_CONF variable - # to override the location of the config file for migration testing - self.CONFIG_FILE_PATH = os.environ.get('TEST_MIGRATIONS_CONF', - self.DEFAULT_CONFIG_FILE) - self.test_databases = {} - self.migration_api = None - - def setUp(self): - super(BaseMigrationTestCase, self).setUp() - - # Load test databases from the config file. Only do this - # once. No need to re-run this on each test... - LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH) - if os.path.exists(self.CONFIG_FILE_PATH): - cp = moves.configparser.RawConfigParser() - try: - cp.read(self.CONFIG_FILE_PATH) - defaults = cp.defaults() - for key, value in defaults.items(): - self.test_databases[key] = value - except moves.configparser.ParsingError as e: - self.fail("Failed to read test_migrations.conf config " - "file. Got error: %s" % e) - else: - self.fail("Failed to find test_migrations.conf config " - "file.") - - self.engines = {} - for key, value in self.test_databases.items(): - self.engines[key] = sqlalchemy.create_engine(value) - - # We start each test case with a completely blank slate.
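The _set_db_lock() decorator above serializes database resets across concurrently running test processes with a file lock; a minimal sketch of the same shape, assuming the lockfile package imported above (lock path and names are illustrative):

    import functools
    import os

    import lockfile

    def with_file_lock(lock_prefix, lock_path='/tmp'):
        def decorator(f):
            @functools.wraps(f)
            def wrapper(*args, **kwargs):
                lock = lockfile.FileLock(os.path.join(lock_path, lock_prefix))
                with lock:  # blocks until no other process holds the lock
                    return f(*args, **kwargs)
            return wrapper
        return decorator

    @with_file_lock('migration_tests-')
    def reset_databases():
        pass  # drop and recreate the test schemas here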
- self._reset_databases() - - def tearDown(self): - # We destroy the test data store between each test case, - # and recreate it, which ensures that we have no side-effects - # from the tests - self._reset_databases() - super(BaseMigrationTestCase, self).tearDown() - - def execute_cmd(self, cmd=None): - process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - output = process.communicate()[0] - LOG.debug(output) - self.assertEqual(0, process.returncode, - "Failed to run: %s\n%s" % (cmd, output)) - - def _reset_pg(self, conn_pieces): - (user, - password, - database, - host) = utils.get_db_connection_info(conn_pieces) - os.environ['PGPASSWORD'] = password - os.environ['PGUSER'] = user - # note(boris-42): We must create and drop database, we can't - # drop database which we have connected to, so for such - # operations there is a special database template1. - sqlcmd = ("psql -w -U %(user)s -h %(host)s -c" - " '%(sql)s' -d template1") - - sql = ("drop database if exists %s;") % database - droptable = sqlcmd % {'user': user, 'host': host, 'sql': sql} - self.execute_cmd(droptable) - - sql = ("create database %s;") % database - createtable = sqlcmd % {'user': user, 'host': host, 'sql': sql} - self.execute_cmd(createtable) - - os.unsetenv('PGPASSWORD') - os.unsetenv('PGUSER') - - @_set_db_lock(lock_prefix='migration_tests-') - def _reset_databases(self): - for key, engine in self.engines.items(): - conn_string = self.test_databases[key] - conn_pieces = urlutils.urlparse(conn_string) - engine.dispose() - if conn_string.startswith('sqlite'): - # We can just delete the SQLite database, which is - # the easiest and cleanest solution - db_path = conn_pieces.path.strip('/') - if os.path.exists(db_path): - os.unlink(db_path) - # No need to recreate the SQLite DB. SQLite will - # create it for us if it's not there... - elif conn_string.startswith('mysql'): - # We can execute the MySQL client to destroy and re-create - # the MYSQL database, which is easier and less error-prone - # than using SQLAlchemy to do this via MetaData...trust me. - (user, password, database, host) = \ - utils.get_db_connection_info(conn_pieces) - sql = ("drop database if exists %(db)s; " - "create database %(db)s;") % {'db': database} - cmd = ("mysql -u \"%(user)s\" -p\"%(password)s\" -h %(host)s " - "-e \"%(sql)s\"") % {'user': user, 'password': password, - 'host': host, 'sql': sql} - self.execute_cmd(cmd) - elif conn_string.startswith('postgresql'): - self._reset_pg(conn_pieces) - - -class WalkVersionsMixin(object): - def _walk_versions(self, engine=None, snake_walk=False, downgrade=True): - # Determine latest version script from the repo, then - # upgrade from 1 through to the latest, with no data - # in the databases. This just checks that the schema itself - # upgrades successfully. 
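_reset_databases() deliberately shells out to the database clients instead of dropping schemas through SQLAlchemy metadata; a minimal sketch of the MySQL branch (the openstack_citest credentials are purely illustrative, and a running MySQL server plus the mysql client are assumed):

    import subprocess

    def reset_mysql_db(user, password, host, database):
        # Drop and recreate the test database in one client invocation.
        sql = ('drop database if exists %(db)s; '
               'create database %(db)s;') % {'db': database}
        subprocess.check_call(['mysql', '-u', user, '-p%s' % password,
                               '-h', host, '-e', sql])

    # reset_mysql_db('openstack_citest', 'openstack_citest',
    #                'localhost', 'openstack_citest')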
- - # Place the database under version control - self.migration_api.version_control(engine, self.REPOSITORY, - self.INIT_VERSION) - self.assertEqual(self.INIT_VERSION, - self.migration_api.db_version(engine, - self.REPOSITORY)) - - LOG.debug('latest version is %s' % self.REPOSITORY.latest) - versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1) - - for version in versions: - # upgrade -> downgrade -> upgrade - self._migrate_up(engine, version, with_data=True) - if snake_walk: - downgraded = self._migrate_down( - engine, version - 1, with_data=True) - if downgraded: - self._migrate_up(engine, version) - - if downgrade: - # Now walk it back down to 0 from the latest, testing - # the downgrade paths. - for version in reversed(versions): - # downgrade -> upgrade -> downgrade - downgraded = self._migrate_down(engine, version - 1) - - if snake_walk and downgraded: - self._migrate_up(engine, version) - self._migrate_down(engine, version - 1) - - def _migrate_down(self, engine, version, with_data=False): - try: - self.migration_api.downgrade(engine, self.REPOSITORY, version) - except NotImplementedError: - # NOTE(sirp): some migrations, namely release-level - # migrations, don't support a downgrade. - return False - - self.assertEqual( - version, self.migration_api.db_version(engine, self.REPOSITORY)) - - # NOTE(sirp): `version` is what we're downgrading to (i.e. the 'target' - # version). So if we have any downgrade checks, they need to be run for - # the previous (higher numbered) migration. - if with_data: - post_downgrade = getattr( - self, "_post_downgrade_%03d" % (version + 1), None) - if post_downgrade: - post_downgrade(engine) - - return True - - def _migrate_up(self, engine, version, with_data=False): - """migrate up to a new version of the db. - - We allow for data insertion and post checks at every - migration version with special _pre_upgrade_### and - _check_### functions in the main test. - """ - # NOTE(sdague): try block is here because it's impossible to debug - # where a failed data migration happens otherwise - try: - if with_data: - data = None - pre_upgrade = getattr( - self, "_pre_upgrade_%03d" % version, None) - if pre_upgrade: - data = pre_upgrade(engine) - - self.migration_api.upgrade(engine, self.REPOSITORY, version) - self.assertEqual(version, - self.migration_api.db_version(engine, - self.REPOSITORY)) - if with_data: - check = getattr(self, "_check_%03d" % version, None) - if check: - check(engine, data) - except Exception: - LOG.error("Failed to migrate to version %s on engine %s" % - (version, engine)) - raise diff --git a/gceapi/openstack/common/db/sqlalchemy/utils.py b/gceapi/openstack/common/db/sqlalchemy/utils.py deleted file mode 100644 index c5386aa..0000000 --- a/gceapi/openstack/common/db/sqlalchemy/utils.py +++ /dev/null @@ -1,548 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2010-2011 OpenStack Foundation. -# Copyright 2012 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import re - -from migrate.changeset import UniqueConstraint -import sqlalchemy -from sqlalchemy import Boolean -from sqlalchemy import CheckConstraint -from sqlalchemy import Column -from sqlalchemy.engine import reflection -from sqlalchemy.ext.compiler import compiles -from sqlalchemy import func -from sqlalchemy import Index -from sqlalchemy import Integer -from sqlalchemy import MetaData -from sqlalchemy.sql.expression import literal_column -from sqlalchemy.sql.expression import UpdateBase -from sqlalchemy.sql import select -from sqlalchemy import String -from sqlalchemy import Table -from sqlalchemy.types import NullType - -from gceapi.openstack.common.gettextutils import _ - -from gceapi.openstack.common import log as logging -from gceapi.openstack.common import timeutils - - -LOG = logging.getLogger(__name__) - -_DBURL_REGEX = re.compile(r"[^:]+://([^:]+):([^@]+)@.+") - - -def sanitize_db_url(url): - match = _DBURL_REGEX.match(url) - if match: - return '%s****:****%s' % (url[:match.start(1)], url[match.end(2):]) - return url - - -class InvalidSortKey(Exception): - message = _("Sort key supplied was not valid.") - - -# copy from glance/db/sqlalchemy/api.py -def paginate_query(query, model, limit, sort_keys, marker=None, - sort_dir=None, sort_dirs=None): - """Returns a query with sorting / pagination criteria added. - - Pagination works by requiring a unique sort_key, specified by sort_keys. - (If sort_keys is not unique, then we risk looping through values.) - We use the last row in the previous page as the 'marker' for pagination. - So we must return values that follow the passed marker in the order. - With a single-valued sort_key, this would be easy: sort_key > X. - With a compound-valued sort_key, (k1, k2, k3) we must do this to repeat - the lexicographical ordering: - (k1 > X1) or (k1 == X1 && k2 > X2) or (k1 == X1 && k2 == X2 && k3 > X3) - - We also have to cope with different sort directions. - - Typically, the id of the last row is used as the client-facing pagination - marker, then the actual marker object must be fetched from the db and - passed in to us as marker. - - :param query: the query object to which we should add paging/sorting - :param model: the ORM model class - :param limit: maximum number of items to return - :param sort_keys: array of attributes by which results should be sorted - :param marker: the last item of the previous page; we return the next - results after this value. - :param sort_dir: direction in which results should be sorted (asc, desc) - :param sort_dirs: per-column array of sort_dirs, corresponding to sort_keys - - :rtype: sqlalchemy.orm.query.Query - :return: The query with sorting/pagination added.
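The lexicographic marker condition in this docstring translates mechanically into SQLAlchemy expressions; a minimal sketch of just that clause-building step for ascending order (model is an ORM class and marker the last row of the previous page, both illustrative):

    import sqlalchemy

    def marker_filter(model, marker, sort_keys):
        # (k1 > X1) OR (k1 == X1 AND k2 > X2) OR ...
        criteria = []
        for i, key in enumerate(sort_keys):
            clause = [getattr(model, sort_keys[j]) ==
                      getattr(marker, sort_keys[j])
                      for j in range(i)]
            clause.append(getattr(model, key) > getattr(marker, key))
            criteria.append(sqlalchemy.and_(*clause))
        return sqlalchemy.or_(*criteria)

    # usage: query = query.filter(marker_filter(Server, last_row, ['id']))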
- """ - - if 'id' not in sort_keys: - # TODO(justinsb): If this ever gives a false-positive, check - # the actual primary key, rather than assuming its id - LOG.warning(_('Id not in sort_keys; is sort_keys unique?')) - - assert(not (sort_dir and sort_dirs)) - - # Default the sort direction to ascending - if sort_dirs is None and sort_dir is None: - sort_dir = 'asc' - - # Ensure a per-column sort direction - if sort_dirs is None: - sort_dirs = [sort_dir for _sort_key in sort_keys] - - assert(len(sort_dirs) == len(sort_keys)) - - # Add sorting - for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs): - try: - sort_dir_func = { - 'asc': sqlalchemy.asc, - 'desc': sqlalchemy.desc, - }[current_sort_dir] - except KeyError: - raise ValueError(_("Unknown sort direction, " - "must be 'desc' or 'asc'")) - try: - sort_key_attr = getattr(model, current_sort_key) - except AttributeError: - raise InvalidSortKey() - query = query.order_by(sort_dir_func(sort_key_attr)) - - # Add pagination - if marker is not None: - marker_values = [] - for sort_key in sort_keys: - v = getattr(marker, sort_key) - marker_values.append(v) - - # Build up an array of sort criteria as in the docstring - criteria_list = [] - for i in range(len(sort_keys)): - crit_attrs = [] - for j in range(i): - model_attr = getattr(model, sort_keys[j]) - crit_attrs.append((model_attr == marker_values[j])) - - model_attr = getattr(model, sort_keys[i]) - if sort_dirs[i] == 'desc': - crit_attrs.append((model_attr < marker_values[i])) - else: - crit_attrs.append((model_attr > marker_values[i])) - - criteria = sqlalchemy.sql.and_(*crit_attrs) - criteria_list.append(criteria) - - f = sqlalchemy.sql.or_(*criteria_list) - query = query.filter(f) - - if limit is not None: - query = query.limit(limit) - - return query - - -def get_table(engine, name): - """Returns an sqlalchemy table dynamically from db. - - Needed because the models don't work for us in migrations - as models will be far out of sync with the current data. - """ - metadata = MetaData() - metadata.bind = engine - return Table(name, metadata, autoload=True) - - -class InsertFromSelect(UpdateBase): - """Form the base for `INSERT INTO table (SELECT ... )` statement.""" - def __init__(self, table, select): - self.table = table - self.select = select - - -@compiles(InsertFromSelect) -def visit_insert_from_select(element, compiler, **kw): - """Form the `INSERT INTO table (SELECT ... )` statement.""" - return "INSERT INTO %s %s" % ( - compiler.process(element.table, asfrom=True), - compiler.process(element.select)) - - -class ColumnError(Exception): - """Error raised when no column or an invalid column is found.""" - - -def _get_not_supported_column(col_name_col_instance, column_name): - try: - column = col_name_col_instance[column_name] - except KeyError: - msg = _("Please specify column %s in col_name_col_instance " - "param. It is required because column has unsupported " - "type by sqlite).") - raise ColumnError(msg % column_name) - - if not isinstance(column, Column): - msg = _("col_name_col_instance param has wrong type of " - "column instance for column %s It should be instance " - "of sqlalchemy.Column.") - raise ColumnError(msg % column_name) - return column - - -def drop_unique_constraint(migrate_engine, table_name, uc_name, *columns, - **col_name_col_instance): - """Drop unique constraint from table. - - This method drops UC from table and works for mysql, postgresql and sqlite. - In mysql and postgresql we are able to use "alter table" construction. 
- Sqlalchemy doesn't support some sqlite column types and replaces their - type with NullType in metadata. We process these columns and replace - NullType with the correct column type. - - :param migrate_engine: sqlalchemy engine - :param table_name: name of the table that contains the unique constraint. - :param uc_name: name of the unique constraint that will be dropped. - :param columns: columns that are in the unique constraint. - :param col_name_col_instance: contains pair column_name=column_instance. - column_instance is an instance of Column. These params - are required only for columns that have types - unsupported by sqlite. For example BigInteger. - """ - - meta = MetaData() - meta.bind = migrate_engine - t = Table(table_name, meta, autoload=True) - - if migrate_engine.name == "sqlite": - override_cols = [ - _get_not_supported_column(col_name_col_instance, col.name) - for col in t.columns - if isinstance(col.type, NullType) - ] - for col in override_cols: - t.columns.replace(col) - - uc = UniqueConstraint(*columns, table=t, name=uc_name) - uc.drop() - - -def drop_old_duplicate_entries_from_table(migrate_engine, table_name, - use_soft_delete, *uc_column_names): - """Drop all old rows having the same values for columns in uc_column_names. - - This method drops (or marks as `deleted` if use_soft_delete is True) old - duplicate rows from the table with name `table_name`. - - :param migrate_engine: Sqlalchemy engine - :param table_name: Table with duplicates - :param use_soft_delete: If True, rows will be marked as `deleted`; - if False, rows will be removed from the table - :param uc_column_names: Unique constraint columns - """ - meta = MetaData() - meta.bind = migrate_engine - - table = Table(table_name, meta, autoload=True) - columns_for_group_by = [table.c[name] for name in uc_column_names] - - columns_for_select = [func.max(table.c.id)] - columns_for_select.extend(columns_for_group_by) - - duplicated_rows_select = select(columns_for_select, - group_by=columns_for_group_by, - having=func.count(table.c.id) > 1) - - for row in migrate_engine.execute(duplicated_rows_select): - # NOTE(boris-42): Do not remove the row that has the biggest ID.
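That grouped select is the whole duplicate-detection trick: one row per group of identical unique-constraint values, carrying the largest id, which is the row to keep. A minimal runnable sketch against an illustrative sqlite table, in the SQLAlchemy 0.9-era style this module targets:

    import sqlalchemy
    from sqlalchemy import MetaData, Table, func, select

    engine = sqlalchemy.create_engine('sqlite://')
    engine.execute('create table images (id integer primary key, name text)')
    engine.execute("insert into images (name) values ('a'), ('a'), ('b')")

    images = Table('images', MetaData(bind=engine), autoload=True)
    dup_select = select([func.max(images.c.id), images.c.name],
                        group_by=[images.c.name],
                        having=func.count(images.c.id) > 1)
    for max_id, name in engine.execute(dup_select):
        # every row sharing this name with id != max_id is an old duplicate
        print('keep id %d for name %r' % (max_id, name))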
- delete_condition = table.c.id != row[0] - is_none = None # workaround for pyflakes - delete_condition &= table.c.deleted_at == is_none - for name in uc_column_names: - delete_condition &= table.c[name] == row[name] - - rows_to_delete_select = select([table.c.id]).where(delete_condition) - for row in migrate_engine.execute(rows_to_delete_select).fetchall(): - LOG.info(_("Deleting duplicated row with id: %(id)s from table: " - "%(table)s") % dict(id=row[0], table=table_name)) - - if use_soft_delete: - delete_statement = table.update().\ - where(delete_condition).\ - values({ - 'deleted': literal_column('id'), - 'updated_at': literal_column('updated_at'), - 'deleted_at': timeutils.utcnow() - }) - else: - delete_statement = table.delete().where(delete_condition) - migrate_engine.execute(delete_statement) - - -def _get_default_deleted_value(table): - if isinstance(table.c.id.type, Integer): - return 0 - if isinstance(table.c.id.type, String): - return "" - raise ColumnError(_("Unsupported id columns type")) - - -def _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes): - table = get_table(migrate_engine, table_name) - - insp = reflection.Inspector.from_engine(migrate_engine) - real_indexes = insp.get_indexes(table_name) - existing_index_names = dict( - [(index['name'], index['column_names']) for index in real_indexes]) - - # NOTE(boris-42): Restore indexes on `deleted` column - for index in indexes: - if 'deleted' not in index['column_names']: - continue - name = index['name'] - if name in existing_index_names: - column_names = [table.c[c] for c in existing_index_names[name]] - old_index = Index(name, *column_names, unique=index["unique"]) - old_index.drop(migrate_engine) - - column_names = [table.c[c] for c in index['column_names']] - new_index = Index(index["name"], *column_names, unique=index["unique"]) - new_index.create(migrate_engine) - - -def change_deleted_column_type_to_boolean(migrate_engine, table_name, - **col_name_col_instance): - if migrate_engine.name == "sqlite": - return _change_deleted_column_type_to_boolean_sqlite( - migrate_engine, table_name, **col_name_col_instance) - insp = reflection.Inspector.from_engine(migrate_engine) - indexes = insp.get_indexes(table_name) - - table = get_table(migrate_engine, table_name) - - old_deleted = Column('old_deleted', Boolean, default=False) - old_deleted.create(table, populate_default=False) - - table.update().\ - where(table.c.deleted == table.c.id).\ - values(old_deleted=True).\ - execute() - - table.c.deleted.drop() - table.c.old_deleted.alter(name="deleted") - - _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes) - - -def _change_deleted_column_type_to_boolean_sqlite(migrate_engine, table_name, - **col_name_col_instance): - insp = reflection.Inspector.from_engine(migrate_engine) - table = get_table(migrate_engine, table_name) - - columns = [] - for column in table.columns: - column_copy = None - if column.name != "deleted": - if isinstance(column.type, NullType): - column_copy = _get_not_supported_column(col_name_col_instance, - column.name) - else: - column_copy = column.copy() - else: - column_copy = Column('deleted', Boolean, default=0) - columns.append(column_copy) - - constraints = [constraint.copy() for constraint in table.constraints] - - meta = table.metadata - new_table = Table(table_name + "__tmp__", meta, - *(columns + constraints)) - new_table.create() - - indexes = [] - for index in insp.get_indexes(table_name): - column_names = [new_table.c[c] for c in index['column_names']] - 
indexes.append(Index(index["name"], *column_names, - unique=index["unique"])) - - c_select = [] - for c in table.c: - if c.name != "deleted": - c_select.append(c) - else: - c_select.append(table.c.deleted == table.c.id) - - ins = InsertFromSelect(new_table, select(c_select)) - migrate_engine.execute(ins) - - table.drop() - [index.create(migrate_engine) for index in indexes] - - new_table.rename(table_name) - new_table.update().\ - where(new_table.c.deleted == new_table.c.id).\ - values(deleted=True).\ - execute() - - -def change_deleted_column_type_to_id_type(migrate_engine, table_name, - **col_name_col_instance): - if migrate_engine.name == "sqlite": - return _change_deleted_column_type_to_id_type_sqlite( - migrate_engine, table_name, **col_name_col_instance) - insp = reflection.Inspector.from_engine(migrate_engine) - indexes = insp.get_indexes(table_name) - - table = get_table(migrate_engine, table_name) - - new_deleted = Column('new_deleted', table.c.id.type, - default=_get_default_deleted_value(table)) - new_deleted.create(table, populate_default=True) - - deleted = True # workaround for pyflakes - table.update().\ - where(table.c.deleted == deleted).\ - values(new_deleted=table.c.id).\ - execute() - table.c.deleted.drop() - table.c.new_deleted.alter(name="deleted") - - _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes) - - -def _change_deleted_column_type_to_id_type_sqlite(migrate_engine, table_name, - **col_name_col_instance): - # NOTE(boris-42): sqlalchemy-migrate can't drop a column with check - # constraints in a sqlite DB and our `deleted` column has - # 2 check constraints. So there is only one way to remove - # these constraints: - # 1) Create a new table with the same columns, constraints - # and indexes (except the deleted column). - # 2) Copy all data from old to new table. - # 3) Drop old table. - # 4) Rename new table to old table name. - insp = reflection.Inspector.from_engine(migrate_engine) - meta = MetaData(bind=migrate_engine) - table = Table(table_name, meta, autoload=True) - default_deleted_value = _get_default_deleted_value(table) - - columns = [] - for column in table.columns: - column_copy = None - if column.name != "deleted": - if isinstance(column.type, NullType): - column_copy = _get_not_supported_column(col_name_col_instance, - column.name) - else: - column_copy = column.copy() - else: - column_copy = Column('deleted', table.c.id.type, - default=default_deleted_value) - columns.append(column_copy) - - def is_deleted_column_constraint(constraint): - # NOTE(boris-42): There is no other way to check whether a - # CheckConstraint is associated with the deleted column.
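The four-step rebuild in the NOTE above is easy to see in plain SQL; a minimal sqlite sketch with an illustrative table, converting a boolean deleted column to the id-typed variant:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.executescript("""
        create table items (id integer primary key, deleted boolean);
        insert into items (deleted) values (0), (1);

        -- 1) new table, same columns except the re-typed `deleted`
        create table items__tmp__ (id integer primary key, deleted integer);
        -- 2) copy all data, mapping True -> id and False -> 0
        insert into items__tmp__ (id, deleted)
            select id, case when deleted then id else 0 end from items;
        -- 3) drop the old table  4) rename the new one into place
        drop table items;
        alter table items__tmp__ rename to items;
    """)
    print(conn.execute('select id, deleted from items').fetchall())
    # -> [(1, 0), (2, 2)]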
- if not isinstance(constraint, CheckConstraint): - return False - sqltext = str(constraint.sqltext) - return (sqltext.endswith("deleted in (0, 1)") or - sqltext.endswith("deleted IN (:deleted_1, :deleted_2)")) - - constraints = [] - for constraint in table.constraints: - if not is_deleted_column_constraint(constraint): - constraints.append(constraint.copy()) - - new_table = Table(table_name + "__tmp__", meta, - *(columns + constraints)) - new_table.create() - - indexes = [] - for index in insp.get_indexes(table_name): - column_names = [new_table.c[c] for c in index['column_names']] - indexes.append(Index(index["name"], *column_names, - unique=index["unique"])) - - ins = InsertFromSelect(new_table, table.select()) - migrate_engine.execute(ins) - - table.drop() - [index.create(migrate_engine) for index in indexes] - - new_table.rename(table_name) - deleted = True # workaround for pyflakes - new_table.update().\ - where(new_table.c.deleted == deleted).\ - values(deleted=new_table.c.id).\ - execute() - - # NOTE(boris-42): Fix value of deleted column: False -> "" or 0. - deleted = False # workaround for pyflakes - new_table.update().\ - where(new_table.c.deleted == deleted).\ - values(deleted=default_deleted_value).\ - execute() - - -def get_connect_string(backend, database, user=None, passwd=None): - """Get database connection - - Try to get a connection with a very specific set of values; if we get - these then we'll run the tests, otherwise they are skipped - """ - args = {'backend': backend, - 'user': user, - 'passwd': passwd, - 'database': database} - if backend == 'sqlite': - template = '%(backend)s:///%(database)s' - else: - template = "%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s" - return template % args - - -def is_backend_avail(backend, database, user=None, passwd=None): - try: - connect_uri = get_connect_string(backend=backend, - database=database, - user=user, - passwd=passwd) - engine = sqlalchemy.create_engine(connect_uri) - connection = engine.connect() - except Exception: - # intentionally catch all to handle exceptions even if we don't - # have any backend code loaded. - return False - else: - connection.close() - engine.dispose() - return True - - -def get_db_connection_info(conn_pieces): - database = conn_pieces.path.strip('/') - loc_pieces = conn_pieces.netloc.split('@') - host = loc_pieces[1] - - auth_pieces = loc_pieces[0].split(':') - user = auth_pieces[0] - password = "" - if len(auth_pieces) > 1: - password = auth_pieces[1].strip() - - return (user, password, database, host) diff --git a/gceapi/openstack/common/eventlet_backdoor.py b/gceapi/openstack/common/eventlet_backdoor.py index 136cbd7..710a3a2 100644 --- a/gceapi/openstack/common/eventlet_backdoor.py +++ b/gceapi/openstack/common/eventlet_backdoor.py @@ -16,21 +16,21 @@ from __future__ import print_function +import copy import errno import gc +import logging import os import pprint import socket import sys import traceback -import eventlet import eventlet.backdoor import greenlet -from oslo.config import cfg +from oslo_config import cfg -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import log as logging +from gceapi.openstack.common._i18n import _LI help_for_backdoor_port = ( "Acceptable values are 0, <port>, and <start>:<end>, where 0 results " @@ -41,7 +41,6 @@ help_for_backdoor_port = ( "chosen port is displayed in the service's log file.") eventlet_backdoor_opts = [ cfg.StrOpt('backdoor_port', - default=None, help="Enable eventlet backdoor.
%s" % help_for_backdoor_port) ] @@ -50,6 +49,12 @@ CONF.register_opts(eventlet_backdoor_opts) LOG = logging.getLogger(__name__) +def list_opts(): + """Entry point for oslo-config-generator. + """ + return [(None, copy.deepcopy(eventlet_backdoor_opts))] + + class EventletBackdoorConfigValueError(Exception): def __init__(self, port_range, help_msg, ex): msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. ' @@ -137,8 +142,10 @@ def initialize_if_enabled(): # In the case of backdoor port being zero, a port number is assigned by # listen(). In any case, pull the port number out here. port = sock.getsockname()[1] - LOG.info(_('Eventlet backdoor listening on %(port)s for process %(pid)d') % - {'port': port, 'pid': os.getpid()}) + LOG.info( + _LI('Eventlet backdoor listening on %(port)s for process %(pid)d'), + {'port': port, 'pid': os.getpid()} + ) eventlet.spawn_n(eventlet.backdoor.backdoor_server, sock, locals=backdoor_locals) return port diff --git a/gceapi/openstack/common/excutils.py b/gceapi/openstack/common/excutils.py deleted file mode 100644 index 5a7cf14..0000000 --- a/gceapi/openstack/common/excutils.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# Copyright 2012, Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Exception related utilities. -""" - -import logging -import sys -import time -import traceback - -import six - -from gceapi.openstack.common.gettextutils import _ - - -class save_and_reraise_exception(object): - """Save current exception, run some code and then re-raise. - - In some cases the exception context can be cleared, resulting in None - being attempted to be re-raised after an exception handler is run. This - can happen when eventlet switches greenthreads or when running an - exception handler, code raises and catches an exception. In both - cases the exception context will be cleared. - - To work around this, we save the exception state, run handler code, and - then re-raise the original exception. If another exception occurs, the - saved exception is logged and the new exception is re-raised. - - In some cases the caller may not want to re-raise the exception, and - for those circumstances this context provides a reraise flag that - can be used to suppress the exception. 
For example:: - - except Exception: - with save_and_reraise_exception() as ctxt: - decide_if_need_reraise() - if not should_be_reraised: - ctxt.reraise = False - """ - def __init__(self): - self.reraise = True - - def __enter__(self): - self.type_, self.value, self.tb, = sys.exc_info() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is not None: - logging.error(_('Original exception being dropped: %s'), - traceback.format_exception(self.type_, - self.value, - self.tb)) - return False - if self.reraise: - six.reraise(self.type_, self.value, self.tb) - - -def forever_retry_uncaught_exceptions(infunc): - def inner_func(*args, **kwargs): - last_log_time = 0 - last_exc_message = None - exc_count = 0 - while True: - try: - return infunc(*args, **kwargs) - except Exception as exc: - this_exc_message = six.u(str(exc)) - if this_exc_message == last_exc_message: - exc_count += 1 - else: - exc_count = 1 - # Do not log any more frequently than once a minute unless - # the exception message changes - cur_time = int(time.time()) - if (cur_time - last_log_time > 60 or - this_exc_message != last_exc_message): - logging.exception( - _('Unexpected exception occurred %d time(s)... ' - 'retrying.') % exc_count) - last_log_time = cur_time - last_exc_message = this_exc_message - exc_count = 0 - # This should be a very rare event. In case it isn't, do - # a sleep. - time.sleep(1) - return inner_func diff --git a/gceapi/openstack/common/gettextutils.py b/gceapi/openstack/common/gettextutils.py deleted file mode 100644 index b5c245f..0000000 --- a/gceapi/openstack/common/gettextutils.py +++ /dev/null @@ -1,440 +0,0 @@ -# Copyright 2012 Red Hat, Inc. -# Copyright 2013 IBM Corp. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -gettext for openstack-common modules. - -Usual usage in an openstack.common module: - - from gceapi.openstack.common.gettextutils import _ -""" - -import copy -import gettext -import locale -from logging import handlers -import os -import re - -from babel import localedata -import six - -_localedir = os.environ.get('gceapi'.upper() + '_LOCALEDIR') -_t = gettext.translation('gceapi', localedir=_localedir, fallback=True) - -_AVAILABLE_LANGUAGES = {} -USE_LAZY = False - - -def enable_lazy(): - """Convenience function for configuring _() to use lazy gettext - - Call this at the start of execution to enable the gettextutils._ - function to use lazy gettext functionality. This is useful if - your project is importing _ directly instead of using the - gettextutils.install() way of importing the _ function. - """ - global USE_LAZY - USE_LAZY = True - - -def _(msg): - if USE_LAZY: - return Message(msg, domain='gceapi') - else: - if six.PY3: - return _t.gettext(msg) - return _t.ugettext(msg) - - -def install(domain, lazy=False): - """Install a _() function using the given translation domain. - - Given a translation domain, install a _() function using gettext's - install() function. 
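In the non-lazy case all of this reduces to the stock gettext machinery; a minimal sketch of the `_` set up at the top of this module, assuming the 'gceapi' domain and the optional GCEAPI_LOCALEDIR override (fallback=True keeps it working when no catalogs are installed):

    import gettext
    import os

    _localedir = os.environ.get('GCEAPI_LOCALEDIR')
    _t = gettext.translation('gceapi', localedir=_localedir, fallback=True)
    _ = _t.gettext  # on Python 2, _t.ugettext returns unicode

    print(_('Database server has gone away: %s') % 'timeout')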
- - The main difference from gettext.install() is that we allow - overriding the default localedir (e.g. /usr/share/locale) using - a translation-domain-specific environment variable (e.g. - NOVA_LOCALEDIR). - - :param domain: the translation domain - :param lazy: indicates whether or not to install the lazy _() function. - The lazy _() introduces a way to do deferred translation - of messages by installing a _ that builds Message objects, - instead of strings, which can then be lazily translated into - any available locale. - """ - if lazy: - # NOTE(mrodden): Lazy gettext functionality. - # - # The following introduces a deferred way to do translations on - # messages in OpenStack. We override the standard _() function - # and % (format string) operation to build Message objects that can - # later be translated when we have more information. - def _lazy_gettext(msg): - """Create and return a Message object. - - Lazy gettext function for a given domain, it is a factory method - for a project/module to get a lazy gettext function for its own - translation domain (i.e. nova, glance, cinder, etc.) - - Message encapsulates a string so that we can translate - it later when needed. - """ - return Message(msg, domain=domain) - - from six import moves - moves.builtins.__dict__['_'] = _lazy_gettext - else: - localedir = '%s_LOCALEDIR' % domain.upper() - if six.PY3: - gettext.install(domain, - localedir=os.environ.get(localedir)) - else: - gettext.install(domain, - localedir=os.environ.get(localedir), - unicode=True) - - -class Message(six.text_type): - """A Message object is a unicode object that can be translated. - - Translation of Message is done explicitly using the translate() method. - For all non-translation intents and purposes, a Message is simply unicode, - and can be treated as such. - """ - - def __new__(cls, msgid, msgtext=None, params=None, - domain='gceapi', *args): - """Create a new Message object. - - In order for translation to work gettext requires a message ID, this - msgid will be used as the base unicode text. It is also possible - for the msgid and the base unicode text to be different by passing - the msgtext parameter. - """ - # If the base msgtext is not given, we use the default translation - # of the msgid (which is in English) just in case the system locale is - # not English, so that the base text will be in that locale by default. - if not msgtext: - msgtext = Message._translate_msgid(msgid, domain) - # We want to initialize the parent unicode with the actual object that - # would have been plain unicode if 'Message' was not enabled. - msg = super(Message, cls).__new__(cls, msgtext) - msg.msgid = msgid - msg.domain = domain - msg.params = params - return msg - - def translate(self, desired_locale=None): - """Translate this message to the desired locale. - - :param desired_locale: The desired locale to translate the message to, - if no locale is provided the message will be - translated to the system's default locale. - - :returns: the translated message in unicode - """ - - translated_message = Message._translate_msgid(self.msgid, - self.domain, - desired_locale) - if self.params is None: - # No need for more translation - return translated_message - - # This Message object may have been formatted with one or more - # Message objects as substitution arguments, given either as a single - # argument, part of a tuple, or as one or more values in a dictionary. 
- # When translating this Message we need to translate those Messages too - translated_params = _translate_args(self.params, desired_locale) - - translated_message = translated_message % translated_params - - return translated_message - - @staticmethod - def _translate_msgid(msgid, domain, desired_locale=None): - if not desired_locale: - system_locale = locale.getdefaultlocale() - # If the system locale is not available to the runtime use English - if not system_locale[0]: - desired_locale = 'en_US' - else: - desired_locale = system_locale[0] - - locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR') - lang = gettext.translation(domain, - localedir=locale_dir, - languages=[desired_locale], - fallback=True) - if six.PY3: - translator = lang.gettext - else: - translator = lang.ugettext - - translated_message = translator(msgid) - return translated_message - - def __mod__(self, other): - # When we mod a Message we want the actual operation to be performed - # by the parent class (i.e. unicode()), the only thing we do here is - # save the original msgid and the parameters in case of a translation - params = self._sanitize_mod_params(other) - unicode_mod = super(Message, self).__mod__(params) - modded = Message(self.msgid, - msgtext=unicode_mod, - params=params, - domain=self.domain) - return modded - - def _sanitize_mod_params(self, other): - """Sanitize the object being modded with this Message. - - - Add support for modding 'None' so translation supports it - - Trim the modded object, which can be a large dictionary, to only - those keys that would actually be used in a translation - - Snapshot the object being modded, in case the message is - translated, it will be used as it was when the Message was created - """ - if other is None: - params = (other,) - elif isinstance(other, dict): - params = self._trim_dictionary_parameters(other) - else: - params = self._copy_param(other) - return params - - def _trim_dictionary_parameters(self, dict_param): - """Return a dict that only has matching entries in the msgid.""" - # NOTE(luisg): Here we trim down the dictionary passed as parameters - # to avoid carrying a lot of unnecessary weight around in the message - # object, for example if someone passes in Message() % locals() but - # only some params are used, and additionally we prevent errors for - # non-deepcopyable objects by unicoding() them. - - # Look for %(param) keys in msgid; - # Skip %% and deal with the case where % is first character on the line - keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', self.msgid) - - # If we don't find any %(param) keys but have a %s - if not keys and re.findall('(?:[^%]|^)%[a-z]', self.msgid): - # Apparently the full dictionary is the parameter - params = self._copy_param(dict_param) - else: - params = {} - # Save our existing parameters as defaults to protect - # ourselves from losing values if we are called through an - # (erroneous) chain that builds a valid Message with - # arguments, and then does something like "msg % kwds" - # where kwds is an empty dictionary. 
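The trimming step described here keys off the %(...)s placeholders that actually occur in the msgid; a minimal sketch of that extraction using the same regex (msgid and params are illustrative):

    import re

    msgid = 'Instance %(instance)s failed in zone %(zone)s'
    params = {'instance': 'i-1', 'zone': 'us-central1-a', 'extra': object()}

    # group 1 of each match is the placeholder name; %% escapes are skipped
    keys = re.findall(r'(?:[^%]|^)?%\((\w*)\)[a-z]', msgid)
    trimmed = dict((k, params[k]) for k in keys)
    print(msgid % trimmed)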
- src = {} - if isinstance(self.params, dict): - src.update(self.params) - src.update(dict_param) - for key in keys: - params[key] = self._copy_param(src[key]) - - return params - - def _copy_param(self, param): - try: - return copy.deepcopy(param) - except TypeError: - # Fallback to casting to unicode this will handle the - # python code-like objects that can't be deep-copied - return six.text_type(param) - - def __add__(self, other): - msg = _('Message objects do not support addition.') - raise TypeError(msg) - - def __radd__(self, other): - return self.__add__(other) - - def __str__(self): - # NOTE(luisg): Logging in python 2.6 tries to str() log records, - # and it expects specifically a UnicodeError in order to proceed. - msg = _('Message objects do not support str() because they may ' - 'contain non-ascii characters. ' - 'Please use unicode() or translate() instead.') - raise UnicodeError(msg) - - -def get_available_languages(domain): - """Lists the available languages for the given translation domain. - - :param domain: the domain to get languages for - """ - if domain in _AVAILABLE_LANGUAGES: - return copy.copy(_AVAILABLE_LANGUAGES[domain]) - - localedir = '%s_LOCALEDIR' % domain.upper() - find = lambda x: gettext.find(domain, - localedir=os.environ.get(localedir), - languages=[x]) - - # NOTE(mrodden): en_US should always be available (and first in case - # order matters) since our in-line message strings are en_US - language_list = ['en_US'] - # NOTE(luisg): Babel <1.0 used a function called list(), which was - # renamed to locale_identifiers() in >=1.0, the requirements master list - # requires >=0.9.6, uncapped, so defensively work with both. We can remove - # this check when the master list updates to >=1.0, and update all projects - list_identifiers = (getattr(localedata, 'list', None) or - getattr(localedata, 'locale_identifiers')) - locale_identifiers = list_identifiers() - - for i in locale_identifiers: - if find(i) is not None: - language_list.append(i) - - # NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported - # locales (e.g. 'zh_CN', and 'zh_TW') aren't supported even though they - # are perfectly legitimate locales: - # https://github.com/mitsuhiko/babel/issues/37 - # In Babel 1.3 they fixed the bug and they support these locales, but - # they are still not explicitly "listed" by locale_identifiers(). - # That is why we add the locales here explicitly if necessary so that - # they are listed as supported. - aliases = {'zh': 'zh_CN', - 'zh_Hant_HK': 'zh_HK', - 'zh_Hant': 'zh_TW', - 'fil': 'tl_PH'} - for (locale, alias) in six.iteritems(aliases): - if locale in language_list and alias not in language_list: - language_list.append(alias) - - _AVAILABLE_LANGUAGES[domain] = language_list - return copy.copy(language_list) - - -def translate(obj, desired_locale=None): - """Gets the translated unicode representation of the given object. - - If the object is not translatable it is returned as-is. - If the locale is None the object is translated to the system locale. 
- - :param obj: the object to translate - :param desired_locale: the locale to translate the message to, if None the - default system locale will be used - :returns: the translated object in unicode, or the original object if - it could not be translated - """ - message = obj - if not isinstance(message, Message): - # If the object to translate is not already translatable, - # let's first get its unicode representation - message = six.text_type(obj) - if isinstance(message, Message): - # Even after unicoding() we still need to check if we are - # running with translatable unicode before translating - return message.translate(desired_locale) - return obj - - -def _translate_args(args, desired_locale=None): - """Translates all the translatable elements of the given arguments object. - - This method is used for translating the translatable values in method - arguments which include values of tuples or dictionaries. - If the object is not a tuple or a dictionary the object itself is - translated if it is translatable. - - If the locale is None the object is translated to the system locale. - - :param args: the args to translate - :param desired_locale: the locale to translate the args to, if None the - default system locale will be used - :returns: a new args object with the translated contents of the original - """ - if isinstance(args, tuple): - return tuple(translate(v, desired_locale) for v in args) - if isinstance(args, dict): - translated_dict = {} - for (k, v) in six.iteritems(args): - translated_v = translate(v, desired_locale) - translated_dict[k] = translated_v - return translated_dict - return translate(args, desired_locale) - - -class TranslationHandler(handlers.MemoryHandler): - """Handler that translates records before logging them. - - The TranslationHandler takes a locale and a target logging.Handler object - to forward LogRecord objects to after translating them. This handler - depends on Message objects being logged, instead of regular strings. - - The handler can be configured declaratively in the logging.conf as follows: - - [handlers] - keys = translatedlog, translator - - [handler_translatedlog] - class = handlers.WatchedFileHandler - args = ('/var/log/api-localized.log',) - formatter = context - - [handler_translator] - class = openstack.common.log.TranslationHandler - target = translatedlog - args = ('zh_CN',) - - If the specified locale is not available in the system, the handler will - log in the default locale. - """ - - def __init__(self, locale=None, target=None): - """Initialize a TranslationHandler - - :param locale: locale to use for translating messages - :param target: logging.Handler object to forward - LogRecord objects to after translation - """ - # NOTE(luisg): In order to allow this handler to be a wrapper for - # other handlers, such as a FileHandler, and still be able to - # configure it using logging.conf, this handler has to extend - # MemoryHandler because only the MemoryHandlers' logging.conf - # parsing is implemented such that it accepts a target handler. 
- handlers.MemoryHandler.__init__(self, capacity=0, target=target) - self.locale = locale - - def setFormatter(self, fmt): - self.target.setFormatter(fmt) - - def emit(self, record): - # We save the message from the original record to restore it - # after translation, so other handlers are not affected by this - original_msg = record.msg - original_args = record.args - - try: - self._translate_and_log_record(record) - finally: - record.msg = original_msg - record.args = original_args - - def _translate_and_log_record(self, record): - record.msg = translate(record.msg, self.locale) - - # In addition to translating the message, we also need to translate - # arguments that were passed to the log method that were not part - # of the main message e.g., log.info(_('Some message %s'), this_one)) - record.args = _translate_args(record.args, self.locale) - - self.target.emit(record) diff --git a/gceapi/openstack/common/importutils.py b/gceapi/openstack/common/importutils.py deleted file mode 100644 index 4fd9ae2..0000000 --- a/gceapi/openstack/common/importutils.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Import related utilities and helper functions. -""" - -import sys -import traceback - - -def import_class(import_str): - """Returns a class from a string including module and class.""" - mod_str, _sep, class_str = import_str.rpartition('.') - try: - __import__(mod_str) - return getattr(sys.modules[mod_str], class_str) - except (ValueError, AttributeError): - raise ImportError('Class %s cannot be found (%s)' % - (class_str, - traceback.format_exception(*sys.exc_info()))) - - -def import_object(import_str, *args, **kwargs): - """Import a class and return an instance of it.""" - return import_class(import_str)(*args, **kwargs) - - -def import_object_ns(name_space, import_str, *args, **kwargs): - """Tries to import object from default namespace. - - Imports a class and return an instance of it, first by trying - to find the class in a default namespace, then failing back to - a full path if not found in the default namespace. - """ - import_value = "%s.%s" % (name_space, import_str) - try: - return import_class(import_value)(*args, **kwargs) - except ImportError: - return import_class(import_str)(*args, **kwargs) - - -def import_module(import_str): - """Import a module.""" - __import__(import_str) - return sys.modules[import_str] - - -def try_import(import_str, default=None): - """Try to import a module and if it fails return default.""" - try: - return import_module(import_str) - except ImportError: - return default diff --git a/gceapi/openstack/common/jsonutils.py b/gceapi/openstack/common/jsonutils.py deleted file mode 100644 index a4ab5b3..0000000 --- a/gceapi/openstack/common/jsonutils.py +++ /dev/null @@ -1,182 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. 
-# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -''' -JSON related utilities. - -This module provides a few things: - - 1) A handy function for getting an object down to something that can be - JSON serialized. See to_primitive(). - - 2) Wrappers around loads() and dumps(). The dumps() wrapper will - automatically use to_primitive() for you if needed. - - 3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson - is available. -''' - - -import datetime -import functools -import inspect -import itertools -import json -try: - import xmlrpclib -except ImportError: - # NOTE(jaypipes): xmlrpclib was renamed to xmlrpc.client in Python3 - # however the function and object call signatures - # remained the same. This whole try/except block should - # be removed and replaced with a call to six.moves once - # six 1.4.2 is released. See http://bit.ly/1bqrVzu - import xmlrpc.client as xmlrpclib - -import six - -from gceapi.openstack.common import gettextutils -from gceapi.openstack.common import importutils -from gceapi.openstack.common import timeutils - -netaddr = importutils.try_import("netaddr") - -_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod, - inspect.isfunction, inspect.isgeneratorfunction, - inspect.isgenerator, inspect.istraceback, inspect.isframe, - inspect.iscode, inspect.isbuiltin, inspect.isroutine, - inspect.isabstract] - -_simple_types = (six.string_types + six.integer_types - + (type(None), bool, float)) - - -def to_primitive(value, convert_instances=False, convert_datetime=True, - level=0, max_depth=3): - """Convert a complex object into primitives. - - Handy for JSON serialization. We can optionally handle instances, - but since this is a recursive function, we could have cyclical - data structures. - - To handle cyclical data structures we could track the actual objects - visited in a set, but not all objects are hashable. Instead we just - track the depth of the object inspections and don't go too deep. - - Therefore, convert_instances=True is lossy ... be aware. - - """ - # handle obvious types first - order of basic types determined by running - # full tests on nova project, resulting in the following counts: - # 572754 - # 460353 - # 379632 - # 274610 - # 199918 - # 114200 - # 51817 - # 26164 - # 6491 - # 283 - # 19 - if isinstance(value, _simple_types): - return value - - if isinstance(value, datetime.datetime): - if convert_datetime: - return timeutils.strtime(value) - else: - return value - - # value of itertools.count doesn't get caught by nasty_type_tests - # and results in infinite loop when list(value) is called. - if type(value) == itertools.count: - return six.text_type(value) - - # FIXME(vish): Workaround for LP bug 852095. Without this workaround, - # tests that raise an exception in a mocked method that - # has a @wrap_exception with a notifier will fail. If - # we up the dependency to 0.5.4 (when it is released) we - # can remove this workaround. 
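The point of to_primitive() is to let json.dumps() fall back to it for objects the stdlib can't serialize, via the default= hook; a minimal sketch of that wiring (the datetime format here is an assumption; this module actually delegates to timeutils.strtime):

    import datetime
    import json

    def to_primitive(value):
        # called by json.dumps only for values it can't serialize itself
        if isinstance(value, datetime.datetime):
            return value.strftime('%Y-%m-%dT%H:%M:%S.%f')
        return str(value)

    payload = {'name': 'disk-1', 'created': datetime.datetime(2015, 9, 2)}
    print(json.dumps(payload, default=to_primitive))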
- if getattr(value, '__module__', None) == 'mox': - return 'mock' - - if level > max_depth: - return '?' - - # The try block may not be necessary after the class check above, - # but just in case ... - try: - recursive = functools.partial(to_primitive, - convert_instances=convert_instances, - convert_datetime=convert_datetime, - level=level, - max_depth=max_depth) - if isinstance(value, dict): - return dict((k, recursive(v)) for k, v in six.iteritems(value)) - elif isinstance(value, (list, tuple)): - return [recursive(lv) for lv in value] - - # It's not clear why xmlrpclib created their own DateTime type, but - # for our purposes, make it a datetime type which is explicitly - # handled - if isinstance(value, xmlrpclib.DateTime): - value = datetime.datetime(*tuple(value.timetuple())[:6]) - - if convert_datetime and isinstance(value, datetime.datetime): - return timeutils.strtime(value) - elif isinstance(value, gettextutils.Message): - return value.data - elif hasattr(value, 'iteritems'): - return recursive(dict(value.iteritems()), level=level + 1) - elif hasattr(value, '__iter__'): - return recursive(list(value)) - elif convert_instances and hasattr(value, '__dict__'): - # Likely an instance of something. Watch for cycles. - # Ignore class member vars. - return recursive(value.__dict__, level=level + 1) - elif netaddr and isinstance(value, netaddr.IPAddress): - return six.text_type(value) - else: - if any(test(value) for test in _nasty_type_tests): - return six.text_type(value) - return value - except TypeError: - # Class objects are tricky since they may define something like - # __iter__ defined but it isn't callable as list(). - return six.text_type(value) - - -def dumps(value, default=to_primitive, **kwargs): - return json.dumps(value, default=default, **kwargs) - - -def loads(s): - return json.loads(s) - - -def load(s): - return json.load(s) - - -try: - import anyjson -except ImportError: - pass -else: - anyjson._modules.append((__name__, 'dumps', TypeError, - 'loads', ValueError, 'load')) - anyjson.force_implementation(__name__) diff --git a/gceapi/openstack/common/log.py b/gceapi/openstack/common/log.py deleted file mode 100644 index cd3769e..0000000 --- a/gceapi/openstack/common/log.py +++ /dev/null @@ -1,657 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Openstack logging handler. - -This module adds to logging functionality by adding the option to specify -a context object when calling the various log methods. If the context object -is not specified, default formatting is used. Additionally, an instance uuid -may be passed as part of the log message, which is intended to make it easier -for admins to find messages related to a specific instance. - -It also allows setting of formatting information through conf. 
- -""" - -import inspect -import itertools -import logging -import logging.config -import logging.handlers -import os -import re -import sys -import traceback - -from oslo.config import cfg -import six -from six import moves - -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import importutils -from gceapi.openstack.common import jsonutils -from gceapi.openstack.common import local - - -_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S" - -_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password'] - -# NOTE(ldbragst): Let's build a list of regex objects using the list of -# _SANITIZE_KEYS we already have. This way, we only have to add the new key -# to the list of _SANITIZE_KEYS and we can generate regular expressions -# for XML and JSON automatically. -_SANITIZE_PATTERNS = [] -_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])', - r'(<%(key)s>).*?()', - r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])', - r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])'] - -for key in _SANITIZE_KEYS: - for pattern in _FORMAT_PATTERNS: - reg_ex = re.compile(pattern % {'key': key}, re.DOTALL) - _SANITIZE_PATTERNS.append(reg_ex) - - -common_cli_opts = [ - cfg.BoolOpt('debug', - short='d', - default=False, - help='Print debugging output (set logging level to ' - 'DEBUG instead of default WARNING level).'), - cfg.BoolOpt('verbose', - short='v', - default=False, - help='Print more verbose output (set logging level to ' - 'INFO instead of default WARNING level).'), -] - -logging_cli_opts = [ - cfg.StrOpt('log-config-append', - metavar='PATH', - deprecated_name='log-config', - help='The name of logging configuration file. It does not ' - 'disable existing loggers, but just appends specified ' - 'logging configuration to any other existing logging ' - 'options. Please see the Python logging module ' - 'documentation for details on logging configuration ' - 'files.'), - cfg.StrOpt('log-format', - default=None, - metavar='FORMAT', - help='DEPRECATED. ' - 'A logging.Formatter log message format string which may ' - 'use any of the available logging.LogRecord attributes. ' - 'This option is deprecated. Please use ' - 'logging_context_format_string and ' - 'logging_default_format_string instead.'), - cfg.StrOpt('log-date-format', - default=_DEFAULT_LOG_DATE_FORMAT, - metavar='DATE_FORMAT', - help='Format string for %%(asctime)s in log records. ' - 'Default: %(default)s'), - cfg.StrOpt('log-file', - metavar='PATH', - deprecated_name='logfile', - help='(Optional) Name of log file to output to. ' - 'If no default is set, logging will go to stdout.'), - cfg.StrOpt('log-dir', - deprecated_name='logdir', - help='(Optional) The base directory used for relative ' - '--log-file paths'), - cfg.BoolOpt('use-syslog', - default=False, - help='Use syslog for logging. ' - 'Existing syslog format is DEPRECATED during I, ' - 'and then will be changed in J to honor RFC5424'), - cfg.BoolOpt('use-syslog-rfc-format', - # TODO(bogdando) remove or use True after existing - # syslog format deprecation in J - default=False, - help='(Optional) Use syslog rfc5424 format for logging. ' - 'If enabled, will add APP-NAME (RFC5424) before the ' - 'MSG part of the syslog message. 
The old format ' - 'without APP-NAME is deprecated in I, ' - 'and will be removed in J.'), - cfg.StrOpt('syslog-log-facility', - default='LOG_USER', - help='Syslog facility to receive log lines') -] - -generic_log_opts = [ - cfg.BoolOpt('use_stderr', - default=True, - help='Log output to standard error') -] - -log_opts = [ - cfg.StrOpt('logging_context_format_string', - default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' - '%(name)s [%(request_id)s %(user_identity)s] ' - '%(instance)s%(message)s', - help='Format string to use for log messages with context'), - cfg.StrOpt('logging_default_format_string', - default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' - '%(name)s [-] %(instance)s%(message)s', - help='Format string to use for log messages without context'), - cfg.StrOpt('logging_debug_format_suffix', - default='%(funcName)s %(pathname)s:%(lineno)d', - help='Data to append to log format when level is DEBUG'), - cfg.StrOpt('logging_exception_prefix', - default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s ' - '%(instance)s', - help='Prefix each line of exception output with this format'), - cfg.ListOpt('default_log_levels', - default=[ - 'amqp=WARN', - 'amqplib=WARN', - 'boto=WARN', - 'qpid=WARN', - 'sqlalchemy=WARN', - 'suds=INFO', - 'iso8601=WARN', - 'requests.packages.urllib3.connectionpool=WARN' - ], - help='List of logger=LEVEL pairs'), - cfg.BoolOpt('publish_errors', - default=False, - help='Publish error events'), - cfg.BoolOpt('fatal_deprecations', - default=False, - help='Make deprecations fatal'), - - # NOTE(mikal): there are two options here because sometimes we are handed - # a full instance (and could include more information), and other times we - # are just handed a UUID for the instance. - cfg.StrOpt('instance_format', - default='[instance: %(uuid)s] ', - help='If an instance is passed with the log message, format ' - 'it like this'), - cfg.StrOpt('instance_uuid_format', - default='[instance: %(uuid)s] ', - help='If an instance UUID is passed with the log message, ' - 'format it like this'), -] - -CONF = cfg.CONF -CONF.register_cli_opts(common_cli_opts) -CONF.register_cli_opts(logging_cli_opts) -CONF.register_opts(generic_log_opts) -CONF.register_opts(log_opts) - -# our new audit level -# NOTE(jkoelker) Since we synthesized an audit level, make the logging -# module aware of it so it acts like other levels. -logging.AUDIT = logging.INFO + 1 -logging.addLevelName(logging.AUDIT, 'AUDIT') - - -try: - NullHandler = logging.NullHandler -except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7 - class NullHandler(logging.Handler): - def handle(self, record): - pass - - def emit(self, record): - pass - - def createLock(self): - self.lock = None - - -def _dictify_context(context): - if context is None: - return None - if not isinstance(context, dict) and getattr(context, 'to_dict', None): - context = context.to_dict() - return context - - -def _get_binary_name(): - return os.path.basename(inspect.stack()[-1][1]) - - -def _get_log_file_path(binary=None): - logfile = CONF.log_file - logdir = CONF.log_dir - - if logfile and not logdir: - return logfile - - if logfile and logdir: - return os.path.join(logdir, logfile) - - if logdir: - binary = binary or _get_binary_name() - return '%s.log' % (os.path.join(logdir, binary),) - - return None - - -def mask_password(message, secret="***"): - """Replace password with 'secret' in message. - - :param message: The string which includes security information. 
- :param secret: value with which to replace passwords. - :returns: The unicode value of message with the password fields masked. - - For example: - - >>> mask_password("'adminPass' : 'aaaaa'") - "'adminPass' : '***'" - >>> mask_password("'admin_pass' : 'aaaaa'") - "'admin_pass' : '***'" - >>> mask_password('"password" : "aaaaa"') - '"password" : "***"' - >>> mask_password("'original_password' : 'aaaaa'") - "'original_password' : '***'" - >>> mask_password("u'original_password' : u'aaaaa'") - "u'original_password' : u'***'" - """ - message = six.text_type(message) - - # NOTE(ldbragst): Check to see if anything in message contains any key - # specified in _SANITIZE_KEYS, if not then just return the message since - # we don't have to mask any passwords. - if not any(key in message for key in _SANITIZE_KEYS): - return message - - secret = r'\g<1>' + secret + r'\g<2>' - for pattern in _SANITIZE_PATTERNS: - message = re.sub(pattern, secret, message) - return message - - -class BaseLoggerAdapter(logging.LoggerAdapter): - - def audit(self, msg, *args, **kwargs): - self.log(logging.AUDIT, msg, *args, **kwargs) - - -class LazyAdapter(BaseLoggerAdapter): - def __init__(self, name='unknown', version='unknown'): - self._logger = None - self.extra = {} - self.name = name - self.version = version - - @property - def logger(self): - if not self._logger: - self._logger = getLogger(self.name, self.version) - return self._logger - - -class ContextAdapter(BaseLoggerAdapter): - warn = logging.LoggerAdapter.warning - - def __init__(self, logger, project_name, version_string): - self.logger = logger - self.project = project_name - self.version = version_string - - @property - def handlers(self): - return self.logger.handlers - - def deprecated(self, msg, *args, **kwargs): - stdmsg = _("Deprecated: %s") % msg - if CONF.fatal_deprecations: - self.critical(stdmsg, *args, **kwargs) - raise DeprecatedConfig(msg=stdmsg) - else: - self.warn(stdmsg, *args, **kwargs) - - def process(self, msg, kwargs): - # NOTE(mrodden): catch any Message/other object and - # coerce to unicode before they can get - # to the python logging and possibly - # cause string encoding trouble - if not isinstance(msg, six.string_types): - msg = six.text_type(msg) - - if 'extra' not in kwargs: - kwargs['extra'] = {} - extra = kwargs['extra'] - - context = kwargs.pop('context', None) - if not context: - context = getattr(local.store, 'context', None) - if context: - extra.update(_dictify_context(context)) - - instance = kwargs.pop('instance', None) - instance_uuid = (extra.get('instance_uuid', None) or - kwargs.pop('instance_uuid', None)) - instance_extra = '' - if instance: - instance_extra = CONF.instance_format % instance - elif instance_uuid: - instance_extra = (CONF.instance_uuid_format - % {'uuid': instance_uuid}) - extra['instance'] = instance_extra - - extra.setdefault('user_identity', kwargs.pop('user_identity', None)) - - extra['project'] = self.project - extra['version'] = self.version - extra['extra'] = extra.copy() - return msg, kwargs - - -class JSONFormatter(logging.Formatter): - def __init__(self, fmt=None, datefmt=None): - # NOTE(jkoelker) we ignore the fmt argument, but its still there - # since logging.config.fileConfig passes it. 
- self.datefmt = datefmt - - def formatException(self, ei, strip_newlines=True): - lines = traceback.format_exception(*ei) - if strip_newlines: - lines = [moves.filter( - lambda x: x, - line.rstrip().splitlines()) for line in lines] - lines = list(itertools.chain(*lines)) - return lines - - def format(self, record): - message = {'message': record.getMessage(), - 'asctime': self.formatTime(record, self.datefmt), - 'name': record.name, - 'msg': record.msg, - 'args': record.args, - 'levelname': record.levelname, - 'levelno': record.levelno, - 'pathname': record.pathname, - 'filename': record.filename, - 'module': record.module, - 'lineno': record.lineno, - 'funcname': record.funcName, - 'created': record.created, - 'msecs': record.msecs, - 'relative_created': record.relativeCreated, - 'thread': record.thread, - 'thread_name': record.threadName, - 'process_name': record.processName, - 'process': record.process, - 'traceback': None} - - if hasattr(record, 'extra'): - message['extra'] = record.extra - - if record.exc_info: - message['traceback'] = self.formatException(record.exc_info) - - return jsonutils.dumps(message) - - -def _create_logging_excepthook(product_name): - def logging_excepthook(exc_type, value, tb): - extra = {} - if CONF.verbose or CONF.debug: - extra['exc_info'] = (exc_type, value, tb) - getLogger(product_name).critical( - "".join(traceback.format_exception_only(exc_type, value)), - **extra) - return logging_excepthook - - -class LogConfigError(Exception): - - message = _('Error loading logging config %(log_config)s: %(err_msg)s') - - def __init__(self, log_config, err_msg): - self.log_config = log_config - self.err_msg = err_msg - - def __str__(self): - return self.message % dict(log_config=self.log_config, - err_msg=self.err_msg) - - -def _load_log_config(log_config_append): - try: - logging.config.fileConfig(log_config_append, - disable_existing_loggers=False) - except moves.configparser.Error as exc: - raise LogConfigError(log_config_append, str(exc)) - - -def setup(product_name): - """Setup logging.""" - if CONF.log_config_append: - _load_log_config(CONF.log_config_append) - else: - _setup_logging_from_conf() - sys.excepthook = _create_logging_excepthook(product_name) - - -def set_defaults(logging_context_format_string): - cfg.set_defaults(log_opts, - logging_context_format_string= - logging_context_format_string) - - -def _find_facility_from_conf(): - facility_names = logging.handlers.SysLogHandler.facility_names - facility = getattr(logging.handlers.SysLogHandler, - CONF.syslog_log_facility, - None) - - if facility is None and CONF.syslog_log_facility in facility_names: - facility = facility_names.get(CONF.syslog_log_facility) - - if facility is None: - valid_facilities = facility_names.keys() - consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON', - 'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS', - 'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP', - 'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3', - 'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7'] - valid_facilities.extend(consts) - raise TypeError(_('syslog facility must be one of: %s') % - ', '.join("'%s'" % fac - for fac in valid_facilities)) - - return facility - - -class RFCSysLogHandler(logging.handlers.SysLogHandler): - def __init__(self, *args, **kwargs): - self.binary_name = _get_binary_name() - super(RFCSysLogHandler, self).__init__(*args, **kwargs) - - def format(self, record): - msg = super(RFCSysLogHandler, self).format(record) - msg = self.binary_name + ' ' + msg - return 
msg - - -def _setup_logging_from_conf(): - log_root = getLogger(None).logger - for handler in log_root.handlers: - log_root.removeHandler(handler) - - if CONF.use_syslog: - facility = _find_facility_from_conf() - # TODO(bogdando) use the format provided by RFCSysLogHandler - # after existing syslog format deprecation in J - if CONF.use_syslog_rfc_format: - syslog = RFCSysLogHandler(address='/dev/log', - facility=facility) - else: - syslog = logging.handlers.SysLogHandler(address='/dev/log', - facility=facility) - log_root.addHandler(syslog) - - logpath = _get_log_file_path() - if logpath: - filelog = logging.handlers.WatchedFileHandler(logpath) - log_root.addHandler(filelog) - - if CONF.use_stderr: - streamlog = ColorHandler() - log_root.addHandler(streamlog) - - elif not logpath: - # pass sys.stdout as a positional argument - # python2.6 calls the argument strm, in 2.7 it's stream - streamlog = logging.StreamHandler(sys.stdout) - log_root.addHandler(streamlog) - - if CONF.publish_errors: - handler = importutils.import_object( - "gceapi.openstack.common.log_handler.PublishErrorsHandler", - logging.ERROR) - log_root.addHandler(handler) - - datefmt = CONF.log_date_format - for handler in log_root.handlers: - # NOTE(alaski): CONF.log_format overrides everything currently. This - # should be deprecated in favor of context aware formatting. - if CONF.log_format: - handler.setFormatter(logging.Formatter(fmt=CONF.log_format, - datefmt=datefmt)) - log_root.info('Deprecated: log_format is now deprecated and will ' - 'be removed in the next release') - else: - handler.setFormatter(ContextFormatter(datefmt=datefmt)) - - if CONF.debug: - log_root.setLevel(logging.DEBUG) - elif CONF.verbose: - log_root.setLevel(logging.INFO) - else: - log_root.setLevel(logging.WARNING) - - for pair in CONF.default_log_levels: - mod, _sep, level_name = pair.partition('=') - level = logging.getLevelName(level_name) - logger = logging.getLogger(mod) - logger.setLevel(level) - -_loggers = {} - - -def getLogger(name='unknown', version='unknown'): - if name not in _loggers: - _loggers[name] = ContextAdapter(logging.getLogger(name), - name, - version) - return _loggers[name] - - -def getLazyLogger(name='unknown', version='unknown'): - """Returns lazy logger. - - Creates a pass-through logger that does not create the real logger - until it is really needed and delegates all calls to the real logger - once it is created. - """ - return LazyAdapter(name, version) - - -class WritableLogger(object): - """A thin wrapper that responds to `write` and logs.""" - - def __init__(self, logger, level=logging.INFO): - self.logger = logger - self.level = level - - def write(self, msg): - self.logger.log(self.level, msg.rstrip()) - - -class ContextFormatter(logging.Formatter): - """A context.RequestContext aware formatter configured through flags. - - The flags used to set format strings are: logging_context_format_string - and logging_default_format_string. You can also specify - logging_debug_format_suffix to append extra formatting if the log level is - debug. 
- - For information about what variables are available for the formatter see: - http://docs.python.org/library/logging.html#formatter - - """ - - def format(self, record): - """Uses contextstring if request_id is set, otherwise default.""" - # NOTE(sdague): default the fancier formatting params - # to an empty string so we don't throw an exception if - # they get used - for key in ('instance', 'color'): - if key not in record.__dict__: - record.__dict__[key] = '' - - if record.__dict__.get('request_id', None): - self._fmt = CONF.logging_context_format_string - else: - self._fmt = CONF.logging_default_format_string - - if (record.levelno == logging.DEBUG and - CONF.logging_debug_format_suffix): - self._fmt += " " + CONF.logging_debug_format_suffix - - # Cache this on the record, Logger will respect our formatted copy - if record.exc_info: - record.exc_text = self.formatException(record.exc_info, record) - return logging.Formatter.format(self, record) - - def formatException(self, exc_info, record=None): - """Format exception output with CONF.logging_exception_prefix.""" - if not record: - return logging.Formatter.formatException(self, exc_info) - - stringbuffer = moves.StringIO() - traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], - None, stringbuffer) - lines = stringbuffer.getvalue().split('\n') - stringbuffer.close() - - if CONF.logging_exception_prefix.find('%(asctime)') != -1: - record.asctime = self.formatTime(record, self.datefmt) - - formatted_lines = [] - for line in lines: - pl = CONF.logging_exception_prefix % record.__dict__ - fl = '%s%s' % (pl, line) - formatted_lines.append(fl) - return '\n'.join(formatted_lines) - - -class ColorHandler(logging.StreamHandler): - LEVEL_COLORS = { - logging.DEBUG: '\033[00;32m', # GREEN - logging.INFO: '\033[00;36m', # CYAN - logging.AUDIT: '\033[01;36m', # BOLD CYAN - logging.WARN: '\033[01;33m', # BOLD YELLOW - logging.ERROR: '\033[01;31m', # BOLD RED - logging.CRITICAL: '\033[01;31m', # BOLD RED - } - - def format(self, record): - record.color = self.LEVEL_COLORS[record.levelno] - return logging.StreamHandler.format(self, record) - - -class DeprecatedConfig(Exception): - message = _("Fatal call to deprecated config: %(msg)s") - - def __init__(self, msg): - super(Exception, self).__init__(self.message % dict(msg=msg)) diff --git a/gceapi/openstack/common/py3kcompat/__init__.py b/gceapi/openstack/common/py3kcompat/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gceapi/openstack/common/py3kcompat/urlutils.py b/gceapi/openstack/common/py3kcompat/urlutils.py deleted file mode 100644 index 84e457a..0000000 --- a/gceapi/openstack/common/py3kcompat/urlutils.py +++ /dev/null @@ -1,67 +0,0 @@ -# -# Copyright 2013 Canonical Ltd. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# - -""" -Python2/Python3 compatibility layer for OpenStack -""" - -import six - -if six.PY3: - # python3 - import urllib.error - import urllib.parse - import urllib.request - - urlencode = urllib.parse.urlencode - urljoin = urllib.parse.urljoin - quote = urllib.parse.quote - quote_plus = urllib.parse.quote_plus - parse_qsl = urllib.parse.parse_qsl - unquote = urllib.parse.unquote - unquote_plus = urllib.parse.unquote_plus - urlparse = urllib.parse.urlparse - urlsplit = urllib.parse.urlsplit - urlunsplit = urllib.parse.urlunsplit - SplitResult = urllib.parse.SplitResult - - urlopen = urllib.request.urlopen - URLError = urllib.error.URLError - pathname2url = urllib.request.pathname2url -else: - # python2 - import urllib - import urllib2 - import urlparse - - urlencode = urllib.urlencode - quote = urllib.quote - quote_plus = urllib.quote_plus - unquote = urllib.unquote - unquote_plus = urllib.unquote_plus - - parse = urlparse - parse_qsl = parse.parse_qsl - urljoin = parse.urljoin - urlparse = parse.urlparse - urlsplit = parse.urlsplit - urlunsplit = parse.urlunsplit - SplitResult = parse.SplitResult - - urlopen = urllib2.urlopen - URLError = urllib2.URLError - pathname2url = urllib.pathname2url diff --git a/gceapi/openstack/common/test.py b/gceapi/openstack/common/test.py deleted file mode 100644 index c406f5d..0000000 --- a/gceapi/openstack/common/test.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Common utilities used in testing""" - -import logging -import os -import tempfile - -import fixtures -import testtools - -_TRUE_VALUES = ('True', 'true', '1', 'yes') -_LOG_FORMAT = "%(levelname)8s [%(name)s] %(message)s" - - -class BaseTestCase(testtools.TestCase): - - def setUp(self): - super(BaseTestCase, self).setUp() - self._set_timeout() - self._fake_output() - self._fake_logs() - self.useFixture(fixtures.NestedTempfile()) - self.useFixture(fixtures.TempHomeDir()) - self.tempdirs = [] - - def _set_timeout(self): - test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0) - try: - test_timeout = int(test_timeout) - except ValueError: - # If timeout value is invalid do not set a timeout. 
- test_timeout = 0 - if test_timeout > 0: - self.useFixture(fixtures.Timeout(test_timeout, gentle=True)) - - def _fake_output(self): - if os.environ.get('OS_STDOUT_CAPTURE') in _TRUE_VALUES: - stdout = self.useFixture(fixtures.StringStream('stdout')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout)) - if os.environ.get('OS_STDERR_CAPTURE') in _TRUE_VALUES: - stderr = self.useFixture(fixtures.StringStream('stderr')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) - - def _fake_logs(self): - if os.environ.get('OS_DEBUG') in _TRUE_VALUES: - level = logging.DEBUG - else: - level = logging.INFO - capture_logs = os.environ.get('OS_LOG_CAPTURE') in _TRUE_VALUES - if capture_logs: - self.useFixture( - fixtures.FakeLogger( - format=_LOG_FORMAT, - level=level, - nuke_handlers=capture_logs, - ) - ) - else: - logging.basicConfig(format=_LOG_FORMAT, level=level) - - def create_tempfiles(self, files, ext='.conf'): - tempfiles = [] - for (basename, contents) in files: - if not os.path.isabs(basename): - (fd, path) = tempfile.mkstemp(prefix=basename, suffix=ext) - else: - path = basename + ext - fd = os.open(path, os.O_CREAT | os.O_WRONLY) - tempfiles.append(path) - try: - os.write(fd, contents) - finally: - os.close(fd) - return tempfiles diff --git a/gceapi/openstack/common/timeutils.py b/gceapi/openstack/common/timeutils.py deleted file mode 100644 index 52688a0..0000000 --- a/gceapi/openstack/common/timeutils.py +++ /dev/null @@ -1,210 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Time related utilities and helper functions. 
-""" - -import calendar -import datetime -import time - -import iso8601 -import six - - -# ISO 8601 extended time format with microseconds -_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f' -_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' -PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND - - -def isotime(at=None, subsecond=False): - """Stringify time in ISO 8601 format.""" - if not at: - at = utcnow() - st = at.strftime(_ISO8601_TIME_FORMAT - if not subsecond - else _ISO8601_TIME_FORMAT_SUBSECOND) - tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' - st += ('Z' if tz == 'UTC' else tz) - return st - - -def parse_isotime(timestr): - """Parse time from ISO 8601 format.""" - try: - return iso8601.parse_date(timestr) - except iso8601.ParseError as e: - raise ValueError(six.text_type(e)) - except TypeError as e: - raise ValueError(six.text_type(e)) - - -def strtime(at=None, fmt=PERFECT_TIME_FORMAT): - """Returns formatted utcnow.""" - if not at: - at = utcnow() - return at.strftime(fmt) - - -def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT): - """Turn a formatted time back into a datetime.""" - return datetime.datetime.strptime(timestr, fmt) - - -def normalize_time(timestamp): - """Normalize time in arbitrary timezone to UTC naive object.""" - offset = timestamp.utcoffset() - if offset is None: - return timestamp - return timestamp.replace(tzinfo=None) - offset - - -def is_older_than(before, seconds): - """Return True if before is older than seconds.""" - if isinstance(before, six.string_types): - before = parse_strtime(before).replace(tzinfo=None) - else: - before = before.replace(tzinfo=None) - - return utcnow() - before > datetime.timedelta(seconds=seconds) - - -def is_newer_than(after, seconds): - """Return True if after is newer than seconds.""" - if isinstance(after, six.string_types): - after = parse_strtime(after).replace(tzinfo=None) - else: - after = after.replace(tzinfo=None) - - return after - utcnow() > datetime.timedelta(seconds=seconds) - - -def utcnow_ts(): - """Timestamp version of our utcnow function.""" - if utcnow.override_time is None: - # NOTE(kgriffs): This is several times faster - # than going through calendar.timegm(...) - return int(time.time()) - - return calendar.timegm(utcnow().timetuple()) - - -def utcnow(): - """Overridable version of utils.utcnow.""" - if utcnow.override_time: - try: - return utcnow.override_time.pop(0) - except AttributeError: - return utcnow.override_time - return datetime.datetime.utcnow() - - -def iso8601_from_timestamp(timestamp): - """Returns a iso8601 formatted date from timestamp.""" - return isotime(datetime.datetime.utcfromtimestamp(timestamp)) - - -utcnow.override_time = None - - -def set_time_override(override_time=None): - """Overrides utils.utcnow. - - Make it return a constant time or a list thereof, one at a time. - - :param override_time: datetime instance or list thereof. If not - given, defaults to the current UTC time. 
- """ - utcnow.override_time = override_time or datetime.datetime.utcnow() - - -def advance_time_delta(timedelta): - """Advance overridden time using a datetime.timedelta.""" - assert(not utcnow.override_time is None) - try: - for dt in utcnow.override_time: - dt += timedelta - except TypeError: - utcnow.override_time += timedelta - - -def advance_time_seconds(seconds): - """Advance overridden time by seconds.""" - advance_time_delta(datetime.timedelta(0, seconds)) - - -def clear_time_override(): - """Remove the overridden time.""" - utcnow.override_time = None - - -def marshall_now(now=None): - """Make an rpc-safe datetime with microseconds. - - Note: tzinfo is stripped, but not required for relative times. - """ - if not now: - now = utcnow() - return dict(day=now.day, month=now.month, year=now.year, hour=now.hour, - minute=now.minute, second=now.second, - microsecond=now.microsecond) - - -def unmarshall_time(tyme): - """Unmarshall a datetime dict.""" - return datetime.datetime(day=tyme['day'], - month=tyme['month'], - year=tyme['year'], - hour=tyme['hour'], - minute=tyme['minute'], - second=tyme['second'], - microsecond=tyme['microsecond']) - - -def delta_seconds(before, after): - """Return the difference between two timing objects. - - Compute the difference in seconds between two date, time, or - datetime objects (as a float, to microsecond resolution). - """ - delta = after - before - return total_seconds(delta) - - -def total_seconds(delta): - """Return the total seconds of datetime.timedelta object. - - Compute total seconds of datetime.timedelta, datetime.timedelta - doesn't have method total_seconds in Python2.6, calculate it manually. - """ - try: - return delta.total_seconds() - except AttributeError: - return ((delta.days * 24 * 3600) + delta.seconds + - float(delta.microseconds) / (10 ** 6)) - - -def is_soon(dt, window): - """Determines if time is going to happen in the next window seconds. - - :param dt: the time - :param window: minimum seconds to remain to consider the time not soon - - :return: True if expiration is within the given duration - """ - soon = (utcnow() + datetime.timedelta(seconds=window)) - return normalize_time(dt) <= soon diff --git a/gceapi/paths.py b/gceapi/paths.py index 7069f95..e4158c4 100644 --- a/gceapi/paths.py +++ b/gceapi/paths.py @@ -1,25 +1,21 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# Copyright 2012 Red Hat, Inc. +# Copyright 2014 +# The Cloudscaling Group, Inc. # -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
+# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. import os +import sys -from oslo.config import cfg +from oslo_config import cfg path_opts = [ cfg.StrOpt('pybasedir', @@ -27,7 +23,7 @@ path_opts = [ '../')), help='Directory where the gceapi python module is installed'), cfg.StrOpt('bindir', - default='$pybasedir/bin', + default=os.path.join(sys.prefix, 'local', 'bin'), help='Directory where gceapi binaries are installed'), cfg.StrOpt('state_path', default='$pybasedir', diff --git a/gceapi/service.py b/gceapi/service.py index 70bd4b0..7e6d0ce 100644 --- a/gceapi/service.py +++ b/gceapi/service.py @@ -24,12 +24,12 @@ import sys import eventlet import greenlet -from oslo.config import cfg +from oslo_config import cfg +from oslo_log import log as logging +from oslo_utils import importutils +from gceapi.i18n import _ from gceapi.openstack.common import eventlet_backdoor -from gceapi.openstack.common.gettextutils import _ -from gceapi.openstack.common import importutils -from gceapi.openstack.common import log as logging from gceapi import wsgi LOG = logging.getLogger(__name__) diff --git a/gceapi/tests/__init__.py b/gceapi/tests/__init__.py index 9111b80..e69de29 100644 --- a/gceapi/tests/__init__.py +++ b/gceapi/tests/__init__.py @@ -1,30 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -:mod:`gceapi.tests` -- Gceapi Unittests -===================================================== - -.. automodule:: gceapi.tests - :platform: Unix -""" - -# See http://code.google.com/p/python-nose/issues/detail?id=373 -# The code below enables nosetests to work with i18n _() blocks -import __builtin__ -setattr(__builtin__, '_', lambda x: x) diff --git a/gceapi/tests/api/__init__.py b/gceapi/tests/unit/__init__.py similarity index 57% rename from gceapi/tests/api/__init__.py rename to gceapi/tests/unit/__init__.py index d65c689..9111b80 100644 --- a/gceapi/tests/api/__init__.py +++ b/gceapi/tests/unit/__init__.py @@ -1,6 +1,7 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC. +# Copyright 2010 United States Government as represented by the +# Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may @@ -14,3 +15,16 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. + +""" +:mod:`gceapi.tests` -- Gceapi Unittests +===================================================== + +.. 
automodule:: gceapi.tests + :platform: Unix +""" + +# See http://code.google.com/p/python-nose/issues/detail?id=373 +# The code below enables nosetests to work with i18n _() blocks +import __builtin__ +setattr(__builtin__, '_', lambda x: x) diff --git a/gceapi/openstack/common/db/__init__.py b/gceapi/tests/unit/api/__init__.py similarity index 100% rename from gceapi/openstack/common/db/__init__.py rename to gceapi/tests/unit/api/__init__.py diff --git a/gceapi/tests/api/common.py b/gceapi/tests/unit/api/common.py similarity index 89% rename from gceapi/tests/api/common.py rename to gceapi/tests/unit/api/common.py index aabf6d9..8712a42 100644 --- a/gceapi/tests/api/common.py +++ b/gceapi/tests/unit/api/common.py @@ -20,18 +20,17 @@ from glanceclient import client as glanceclient from keystoneclient.v2_0 import client as kc from neutronclient.v2_0 import client as neutronclient from novaclient import client as novaclient -from novaclient import shell as novashell +from oslo_utils import timeutils import gceapi.api -from gceapi.openstack.common import timeutils -from gceapi import test -from gceapi.tests.api import fake_cinder_client -from gceapi.tests.api import fake_db -from gceapi.tests.api import fake_glance_client -from gceapi.tests.api import fake_keystone_client -from gceapi.tests.api import fake_neutron_client -from gceapi.tests.api import fake_nova_client -from gceapi.tests.api import fake_request +from gceapi.tests.unit.api import fake_cinder_client +from gceapi.tests.unit.api import fake_db +from gceapi.tests.unit.api import fake_glance_client +from gceapi.tests.unit.api import fake_keystone_client +from gceapi.tests.unit.api import fake_neutron_client +from gceapi.tests.unit.api import fake_nova_client +from gceapi.tests.unit.api import fake_request +from gceapi.tests.unit import test COMMON_OPERATION = { @@ -101,8 +100,6 @@ class GCEControllerTest(test.TestCase): fake_glance_client.FakeGlanceClient) self.stubs.Set(cinderclient, "Client", fake_cinder_client.FakeCinderClient) - self.stubs.Set(novashell.OpenStackComputeShell, '_discover_extensions', - fake_nova_client.fake_discover_extensions) self.stubs.Set(novaclient, 'Client', fake_nova_client.FakeNovaClient) self.db_fixture = self.useFixture(fake_db.DBFixture(self.stubs)) self.stubs.Set( diff --git a/gceapi/tests/api/fake_cinder_client.py b/gceapi/tests/unit/api/fake_cinder_client.py similarity index 99% rename from gceapi/tests/api/fake_cinder_client.py rename to gceapi/tests/unit/api/fake_cinder_client.py index ab3d628..8f15566 100644 --- a/gceapi/tests/api/fake_cinder_client.py +++ b/gceapi/tests/unit/api/fake_cinder_client.py @@ -16,8 +16,8 @@ import copy from cinderclient import exceptions as exc -from gceapi.tests.api import fake_request -from gceapi.tests.api import utils +from gceapi.tests.unit.api import fake_request +from gceapi.tests.unit.api import utils FAKE_DISKS = [utils.FakeObject({ diff --git a/gceapi/tests/api/fake_db.py b/gceapi/tests/unit/api/fake_db.py similarity index 100% rename from gceapi/tests/api/fake_db.py rename to gceapi/tests/unit/api/fake_db.py diff --git a/gceapi/tests/api/fake_glance_client.py b/gceapi/tests/unit/api/fake_glance_client.py similarity index 96% rename from gceapi/tests/api/fake_glance_client.py rename to gceapi/tests/unit/api/fake_glance_client.py index d8b1bc0..f5ba5a3 100644 --- a/gceapi/tests/api/fake_glance_client.py +++ b/gceapi/tests/unit/api/fake_glance_client.py @@ -15,10 +15,10 @@ import copy from glanceclient import exc as glance_exc +from oslo_utils import timeutils 
-from gceapi.openstack.common import timeutils -from gceapi.tests.api import fake_request -from gceapi.tests.api import utils +from gceapi.tests.unit.api import fake_request +from gceapi.tests.unit.api import utils _TIMESTAMP = timeutils.parse_isotime('2013-08-01T11:30:25') diff --git a/gceapi/tests/api/fake_keystone_client.py b/gceapi/tests/unit/api/fake_keystone_client.py similarity index 91% rename from gceapi/tests/api/fake_keystone_client.py rename to gceapi/tests/unit/api/fake_keystone_client.py index a6bf907..61c5cec 100644 --- a/gceapi/tests/api/fake_keystone_client.py +++ b/gceapi/tests/unit/api/fake_keystone_client.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from gceapi.tests.api import fake_request -from gceapi.tests.api import utils +from gceapi.tests.unit.api import fake_request +from gceapi.tests.unit.api import utils FAKE_PROJECTS = [utils.FakeObject({ diff --git a/gceapi/tests/api/fake_neutron_client.py b/gceapi/tests/unit/api/fake_neutron_client.py similarity index 99% rename from gceapi/tests/api/fake_neutron_client.py rename to gceapi/tests/unit/api/fake_neutron_client.py index e3673d0..0b7b141 100644 --- a/gceapi/tests/api/fake_neutron_client.py +++ b/gceapi/tests/unit/api/fake_neutron_client.py @@ -15,7 +15,7 @@ import copy import uuid -from gceapi.tests.api import fake_request +from gceapi.tests.unit.api import fake_request FAKE_NETWORKS = { diff --git a/gceapi/tests/api/fake_nova_client.py b/gceapi/tests/unit/api/fake_nova_client.py similarity index 99% rename from gceapi/tests/api/fake_nova_client.py rename to gceapi/tests/unit/api/fake_nova_client.py index 9f1a17b..315ff0b 100644 --- a/gceapi/tests/api/fake_nova_client.py +++ b/gceapi/tests/unit/api/fake_nova_client.py @@ -19,9 +19,9 @@ import uuid from novaclient import client as novaclient from gceapi.api import base_api -from gceapi.openstack.common.gettextutils import _ -from gceapi.tests.api import fake_request -from gceapi.tests.api import utils +from gceapi.i18n import _ +from gceapi.tests.unit.api import fake_request +from gceapi.tests.unit.api import utils FAKE_DETAILED_ZONES = [utils.FakeObject({ diff --git a/gceapi/tests/api/fake_request.py b/gceapi/tests/unit/api/fake_request.py similarity index 100% rename from gceapi/tests/api/fake_request.py rename to gceapi/tests/unit/api/fake_request.py diff --git a/gceapi/tests/api/test_addresses.py b/gceapi/tests/unit/api/test_addresses.py similarity index 99% rename from gceapi/tests/api/test_addresses.py rename to gceapi/tests/unit/api/test_addresses.py index a70b8a6..03faec3 100644 --- a/gceapi/tests/api/test_addresses.py +++ b/gceapi/tests/unit/api/test_addresses.py @@ -13,7 +13,7 @@ # limitations under the License. 
from gceapi.api import addresses -from gceapi.tests.api import common +from gceapi.tests.unit.api import common EXPECTED_ADDRESSES = [{ "kind": "compute#address", diff --git a/gceapi/tests/api/test_disks.py b/gceapi/tests/unit/api/test_disks.py similarity index 99% rename from gceapi/tests/api/test_disks.py rename to gceapi/tests/unit/api/test_disks.py index b183d93..e8fe6f4 100644 --- a/gceapi/tests/api/test_disks.py +++ b/gceapi/tests/unit/api/test_disks.py @@ -14,7 +14,7 @@ import copy -from gceapi.tests.api import common +from gceapi.tests.unit.api import common EXPECTED_DISK_1 = { diff --git a/gceapi/tests/api/test_fields.py b/gceapi/tests/unit/api/test_fields.py similarity index 98% rename from gceapi/tests/api/test_fields.py rename to gceapi/tests/unit/api/test_fields.py index b7e0895..7727b72 100644 --- a/gceapi/tests/api/test_fields.py +++ b/gceapi/tests/unit/api/test_fields.py @@ -13,7 +13,7 @@ # limitations under the License. from gceapi.api import utils -from gceapi import test +from gceapi.tests.unit import test class FieldsTest(test.TestCase): diff --git a/gceapi/tests/api/test_firewalls.py b/gceapi/tests/unit/api/test_firewalls.py similarity index 99% rename from gceapi/tests/api/test_firewalls.py rename to gceapi/tests/unit/api/test_firewalls.py index 1a03f91..39884af 100644 --- a/gceapi/tests/api/test_firewalls.py +++ b/gceapi/tests/unit/api/test_firewalls.py @@ -15,7 +15,7 @@ import copy import gceapi.context -from gceapi.tests.api import common +from gceapi.tests.unit.api import common DEFAULT_FIREWALL = { diff --git a/gceapi/tests/api/test_images.py b/gceapi/tests/unit/api/test_images.py similarity index 99% rename from gceapi/tests/api/test_images.py rename to gceapi/tests/unit/api/test_images.py index 35c2fdd..c1d8fc4 100644 --- a/gceapi/tests/api/test_images.py +++ b/gceapi/tests/unit/api/test_images.py @@ -14,7 +14,7 @@ import copy -from gceapi.tests.api import common +from gceapi.tests.unit.api import common EXPECTED_IMAGE_1 = { "kind": "compute#image", diff --git a/gceapi/tests/api/test_instances.py b/gceapi/tests/unit/api/test_instances.py similarity index 99% rename from gceapi/tests/api/test_instances.py rename to gceapi/tests/unit/api/test_instances.py index 1b899b9..24ac120 100644 --- a/gceapi/tests/api/test_instances.py +++ b/gceapi/tests/unit/api/test_instances.py @@ -14,7 +14,7 @@ import copy -from gceapi.tests.api import common +from gceapi.tests.unit.api import common EXPECTED_INSTANCES = [{ "kind": "compute#instance", diff --git a/gceapi/tests/api/test_machine_types.py b/gceapi/tests/unit/api/test_machine_types.py similarity index 99% rename from gceapi/tests/api/test_machine_types.py rename to gceapi/tests/unit/api/test_machine_types.py index c109fc0..cec36e7 100644 --- a/gceapi/tests/api/test_machine_types.py +++ b/gceapi/tests/unit/api/test_machine_types.py @@ -15,7 +15,7 @@ import copy from gceapi.api import machine_types -from gceapi.tests.api import common +from gceapi.tests.unit.api import common EXPECTED_FLAVORS = [{ diff --git a/gceapi/tests/api/test_networks.py b/gceapi/tests/unit/api/test_networks.py similarity index 99% rename from gceapi/tests/api/test_networks.py rename to gceapi/tests/unit/api/test_networks.py index 305d831..d0b853f 100644 --- a/gceapi/tests/api/test_networks.py +++ b/gceapi/tests/unit/api/test_networks.py @@ -13,7 +13,7 @@ # limitations under the License. 
from gceapi.api import networks -from gceapi.tests.api import common +from gceapi.tests.unit.api import common EXPECTED_NETWORKS = [{ diff --git a/gceapi/tests/api/test_operations.py b/gceapi/tests/unit/api/test_operations.py similarity index 99% rename from gceapi/tests/api/test_operations.py rename to gceapi/tests/unit/api/test_operations.py index 6a001c5..f34844b 100644 --- a/gceapi/tests/api/test_operations.py +++ b/gceapi/tests/unit/api/test_operations.py @@ -13,7 +13,7 @@ # limitations under the License. from gceapi.api import operations -from gceapi.tests.api import common +from gceapi.tests.unit.api import common FAKE_ADD_INSTANCE = { u'status': u'RUNNING', diff --git a/gceapi/tests/api/test_projects.py b/gceapi/tests/unit/api/test_projects.py similarity index 98% rename from gceapi/tests/api/test_projects.py rename to gceapi/tests/unit/api/test_projects.py index 56c8811..92ddb4a 100644 --- a/gceapi/tests/api/test_projects.py +++ b/gceapi/tests/unit/api/test_projects.py @@ -13,7 +13,7 @@ # limitations under the License. from gceapi.api import projects -from gceapi.tests.api import common +from gceapi.tests.unit.api import common EXPECTED_PROJECT = { diff --git a/gceapi/tests/api/test_regions.py b/gceapi/tests/unit/api/test_regions.py similarity index 98% rename from gceapi/tests/api/test_regions.py rename to gceapi/tests/unit/api/test_regions.py index a8f590b..e4443f5 100644 --- a/gceapi/tests/api/test_regions.py +++ b/gceapi/tests/unit/api/test_regions.py @@ -13,7 +13,7 @@ # limitations under the License. from gceapi.api import regions -from gceapi.tests.api import common +from gceapi.tests.unit.api import common EXPECTED_REGIONS = [ diff --git a/gceapi/tests/api/test_routes.py b/gceapi/tests/unit/api/test_routes.py similarity index 99% rename from gceapi/tests/api/test_routes.py rename to gceapi/tests/unit/api/test_routes.py index 87b8c72..5b61f39 100644 --- a/gceapi/tests/api/test_routes.py +++ b/gceapi/tests/unit/api/test_routes.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from gceapi.tests.api import common +from gceapi.tests.unit.api import common FAKE_LOCAL_ROUTE_1 = { u'priority': 1000, diff --git a/gceapi/tests/api/test_snapshots.py b/gceapi/tests/unit/api/test_snapshots.py similarity index 99% rename from gceapi/tests/api/test_snapshots.py rename to gceapi/tests/unit/api/test_snapshots.py index 1ae000a..1861840 100644 --- a/gceapi/tests/api/test_snapshots.py +++ b/gceapi/tests/unit/api/test_snapshots.py @@ -15,7 +15,7 @@ import copy from gceapi.api import snapshots -from gceapi.tests.api import common +from gceapi.tests.unit.api import common EXPECTED_SNAPSHOTS = [{ "kind": "compute#snapshot", diff --git a/gceapi/tests/api/test_zones.py b/gceapi/tests/unit/api/test_zones.py similarity index 98% rename from gceapi/tests/api/test_zones.py rename to gceapi/tests/unit/api/test_zones.py index d72e591..ef7133a 100644 --- a/gceapi/tests/api/test_zones.py +++ b/gceapi/tests/unit/api/test_zones.py @@ -13,7 +13,7 @@ # limitations under the License. 
from gceapi.api import zones -from gceapi.tests.api import common +from gceapi.tests.unit.api import common EXPECTED_ZONES = [{ diff --git a/gceapi/tests/api/utils.py b/gceapi/tests/unit/api/utils.py similarity index 100% rename from gceapi/tests/api/utils.py rename to gceapi/tests/unit/api/utils.py diff --git a/gceapi/test.py b/gceapi/tests/unit/test.py similarity index 95% rename from gceapi/test.py rename to gceapi/tests/unit/test.py index fb71a7b..ad0dc4e 100644 --- a/gceapi/test.py +++ b/gceapi/tests/unit/test.py @@ -30,21 +30,16 @@ import collections import eventlet import fixtures import mox -from oslo.config import cfg +from oslo_config import cfg +from oslo_log import log as logging import stubout import testtools -from gceapi.openstack.common import log as logging from gceapi import paths CONF = cfg.CONF -CONF.import_opt('connection', - 'gceapi.openstack.common.db.sqlalchemy.session', - group='database') -CONF.set_override('use_stderr', False) -logging.setup('gceapi') LOG = logging.getLogger(__name__) eventlet.monkey_patch(os=False) diff --git a/gceapi/version.py b/gceapi/version.py index 97f9dda..f9df1c5 100644 --- a/gceapi/version.py +++ b/gceapi/version.py @@ -1,18 +1,16 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation +# Copyright 2014 +# The Cloudscaling Group, Inc. # -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
 import pbr.version
diff --git a/gceapi/wsgi.py b/gceapi/wsgi.py
index 5d27795..e6af589 100644
--- a/gceapi/wsgi.py
+++ b/gceapi/wsgi.py
@@ -21,11 +21,12 @@
 import os.path
 import socket
-import sys

 import eventlet.wsgi
 import greenlet
-from oslo.config import cfg
+from oslo_config import cfg
+from oslo_log import log as logging
+from oslo_utils import excutils
 from paste import deploy
 import routes.middleware
 import ssl
@@ -33,9 +34,8 @@
 import webob.dec
 import webob.exc

 from gceapi import exception
-from gceapi.openstack.common import excutils
-from gceapi.openstack.common.gettextutils import _
-from gceapi.openstack.common import log as logging
+from gceapi.i18n import _
+
 wsgi_opts = [
     cfg.StrOpt('api_paste_config',
@@ -95,7 +95,6 @@ class Server(object):
         self._protocol = protocol
         self._pool = eventlet.GreenPool(pool_size or self.default_pool_size)
         self._logger = logging.getLogger("gceapi.%s.wsgi.server" % self.name)
-        self._wsgi_logger = logging.WritableLogger(self._logger)
         self._use_ssl = use_ssl
         self._max_url_len = max_url_len
@@ -186,7 +185,6 @@
             'site': self.app,
             'protocol': self._protocol,
             'custom_pool': self._pool,
-            'log': self._wsgi_logger,
             'log_format': CONF.wsgi_log_format
         }
@@ -357,42 +355,6 @@
         return self.process_response(response)


-class Debug(Middleware):
-    """Helper class for debugging a WSGI application.
-
-    Can be inserted into any WSGI application chain to get information
-    about the request and response.
-
-    """
-
-    @webob.dec.wsgify(RequestClass=Request)
-    def __call__(self, req):
-        print(('*' * 40) + ' REQUEST ENVIRON')
-        for key, value in req.environ.items():
-            print(key, '=', value)
-        print()
-        resp = req.get_response(self.application)
-
-        print(('*' * 40) + ' RESPONSE HEADERS')
-        for (key, value) in resp.headers.iteritems():
-            print(key, '=', value)
-        print()
-
-        resp.app_iter = self.print_generator(resp.app_iter)
-
-        return resp
-
-    @staticmethod
-    def print_generator(app_iter):
-        """Iterator that prints the contents of a wrapper string."""
-        print ('*' * 40) + ' BODY'
-        for part in app_iter:
-            sys.stdout.write(part)
-            sys.stdout.flush()
-            yield part
-        print
-
-
 class Router(object):
     """WSGI middleware that maps incoming requests to WSGI apps."""
diff --git a/gceapi/wsgi_ext.py b/gceapi/wsgi_ext.py
index 62e58d3..69e44e9 100644
--- a/gceapi/wsgi_ext.py
+++ b/gceapi/wsgi_ext.py
@@ -21,13 +21,13 @@
 import re
 import routes
 import time

+from oslo_log import log as logging
+from oslo_serialization import jsonutils
 import webob

 from gceapi import exception
-from gceapi.openstack.common import gettextutils
-from gceapi.openstack.common.gettextutils import _
-from gceapi.openstack.common import jsonutils
-from gceapi.openstack.common import log as logging
+from gceapi import i18n
+from gceapi.i18n import _
 from gceapi import wsgi
@@ -201,7 +201,7 @@
         if not self.accept_language:
             return None
         return self.accept_language.best_match(
-            gettextutils.get_available_languages('gceapi'))
+            i18n.get_available_languages('gceapi'))

 class ActionDispatcher(object):
@@ -714,7 +714,7 @@
         LOG.debug(_("Returning %(code)s to user: %(explanation)s"),
                   {'code': code, 'explanation': explanation})

-        explanation = gettextutils.translate(explanation, user_locale)
+        explanation = i18n.translate(explanation, user_locale)
         fault_data = {
             fault_name: {
                 'code': code,
@@ -779,11 +779,11 @@
         metadata = {"attributes": {"overLimit": ["code", "retryAfter"]}}

         self.content['overLimit']['message'] = \
-            gettextutils.translate(
+            i18n.translate(
                 self.content['overLimit']['message'],
                 user_locale)
         self.content['overLimit']['details'] = \
-            gettextutils.translate(
+            i18n.translate(
                 self.content['overLimit']['details'],
                 user_locale)
diff --git a/install.sh b/install.sh
index 8ecf5b5..c9a8031 100755
--- a/install.sh
+++ b/install.sh
@@ -251,4 +251,4 @@
 sudo rm -rf build gce_api.egg-info
 #recreate database
 echo Setuping database
-sudo bin/gceapi-db-setup deb
+sudo tools/db/gceapi-db-setup deb
diff --git a/openstack-common.conf b/openstack-common.conf
index 8f8e19b..eff3224 100644
--- a/openstack-common.conf
+++ b/openstack-common.conf
@@ -1,7 +1,7 @@
 [DEFAULT]

 # The list of modules to copy from openstack-common
-modules=db,db.sqlalchemy,eventlet_backdoor,gettextutils,excutils,jsonutils,local,timeutils
+modules=eventlet_backdoor

 # The base module to hold the copy of openstack.common
 base=gceapi
diff --git a/requirements.txt b/requirements.txt
index ae1cab8..e4d7737 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,24 +1,25 @@
-anyjson>=0.3.3
 argparse
 Babel>=1.3
-eventlet>=0.13.0
+eventlet>=0.17.4
 greenlet>=0.3.2
-iso8601>=0.1.9
-jsonschema>=2.0.0,<3.0.0
-oslo.config>=1.2.1
+oslo.concurrency>=2.3.0 # Apache-2.0
+oslo.config>=2.3.0 # Apache-2.0
+oslo.log>=1.8.0 # Apache-2.0
+oslo.utils>=2.0.0 # Apache-2.0
+oslo.serialization>=1.4.0 # Apache-2.0
+oslo.db>=2.4.1 # Apache-2.0
 paramiko>=1.13.0
 Paste
 PasteDeploy>=1.5.0
-pbr>=0.6,!=0.7,<1.0
-pyasn1
-python-cinderclient>=1.0.6
-python-glanceclient>=0.9.0
-python-keystoneclient>=0.9.0
-python-neutronclient>=2.3.5,<3
-python-novaclient>=2.17.0
-Routes>=1.12.3
-six>=1.7.0
-SQLAlchemy>=0.8.4,!=0.9.5,<=0.9.99
-sqlalchemy-migrate>=0.9.1
-stevedore>=0.14
+pbr<2.0,>=1.6
+python-cinderclient>=1.3.1
+python-glanceclient>=0.18.0
+python-keystoneclient>=1.6.0
+python-neutronclient<3,>=2.6.0
+python-novaclient>=2.26.0
+Routes>=1.12.3,!=2.0,!=2.1;python_version=='2.7'
+Routes>=1.12.3,!=2.0;python_version!='2.7'
+six>=1.9.0
+SQLAlchemy<1.1.0,>=0.9.9
+sqlalchemy-migrate>=0.9.6
 WebOb>=1.2.3
diff --git a/run_tests.sh b/run_tests.sh
deleted file mode 100755
index 385d0df..0000000
--- a/run_tests.sh
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/bin/bash
-
-function usage {
-    echo "Usage: $0 [OPTION]..."
-    echo "Run Gceapi's test suite(s)"
-    echo ""
-    echo "  -V, --virtual-env        Always use virtualenv. Install automatically if not present"
-    echo "  -N, --no-virtual-env     Don't use virtualenv. Run tests in local environment"
-    echo "  -f, --force              Force a clean re-build of the virtual environment. Useful when dependencies have been added."
-    echo "  -u, --update             Update the virtual environment with any newer package versions"
-    echo "  --unittests-only         Run unit tests only, exclude functional tests."
-    echo "  -p, --flake8             Just run flake8"
-    echo "  -P, --no-flake8          Don't run static code checks"
-    echo "  -h, --help               Print this usage message"
-    echo ""
-    echo "Note: with no options specified, the script will try to run the tests in a virtual environment,"
-    echo "      If no virtualenv is found, the script will ask if you would like to create one. If you "
-    echo "      prefer to run tests NOT in a virtual environment, simply pass the -N option."
-    exit
-}
-
-function process_option {
-    case "$1" in
-        -h|--help) usage;;
-        -V|--virtual-env) let always_venv=1; let never_venv=0;;
-        -N|--no-virtual-env) let always_venv=0; let never_venv=1;;
-        -p|--flake8) let just_flake8=1;;
-        -P|--no-flake8) let no_flake8=1;;
-        -f|--force) let force=1;;
-        -u|--update) update=1;;
-        --unittests-only) noseopts="$noseopts --exclude-dir=gceapi/tests/functional";;
-        -c|--coverage) noseopts="$noseopts --with-coverage --cover-package=gceapi";;
-        -*) noseopts="$noseopts $1";;
-        *) noseargs="$noseargs $1"
-    esac
-}
-
-venv=.venv
-with_venv=tools/with_venv.sh
-always_venv=0
-never_venv=0
-force=0
-noseopts=
-noseargs=
-wrapper=""
-just_flake8=0
-no_flake8=0
-update=0
-
-export NOSE_WITH_OPENSTACK=1
-export NOSE_OPENSTACK_COLOR=1
-export NOSE_OPENSTACK_RED=0.05
-export NOSE_OPENSTACK_YELLOW=0.025
-export NOSE_OPENSTACK_SHOW_ELAPSED=1
-export NOSE_OPENSTACK_STDOUT=1
-
-for arg in "$@"; do
-    process_option $arg
-done
-
-function run_tests {
-    # Cleanup *pyc
-    ${wrapper} find . -type f -name "*.pyc" -delete
-    # Just run the test suites in current environment
-    ${wrapper} rm -f tests.sqlite
-    ${wrapper} $NOSETESTS
-}
-
-function run_flake8 {
-    echo "Running flake8 ..."
-    if [ $never_venv -eq 1 ]; then
-        echo "**WARNING**:" >&2
-        echo "Running flake8 without virtual env may miss OpenStack HACKING detection" >&2
-    fi
-
-    ${wrapper} flake8
-}
-
-
-NOSETESTS="nosetests $noseopts $noseargs"
-
-if [ $never_venv -eq 0 ]
-then
-    # Remove the virtual environment if --force used
-    if [ $force -eq 1 ]; then
-        echo "Cleaning virtualenv..."
-        rm -rf ${venv}
-    fi
-    if [ $update -eq 1 ]; then
-        echo "Updating virtualenv..."
-        python tools/install_venv.py
-    fi
-    if [ -e ${venv} ]; then
-        wrapper="${with_venv}"
-    else
-        if [ $always_venv -eq 1 ]; then
-            # Automatically install the virtualenv
-            python tools/install_venv.py
-            wrapper="${with_venv}"
-        else
-            echo -e "No virtual environment found...create one? (Y/n) \c"
-            read use_ve
-            if [ "x$use_ve" = "xY" -o "x$use_ve" = "x" -o "x$use_ve" = "xy" ]; then
-                # Install the virtualenv and run the test suite in it
-                python tools/install_venv.py
-                wrapper=${with_venv}
-            fi
-        fi
-    fi
-fi
-
-if [ $just_flake8 -eq 1 ]; then
-    run_flake8
-    exit
-fi
-
-run_tests || exit
-
-if [ -z "$noseargs" ]; then
-    if [ $no_flake8 -eq 0 ]; then
-        run_flake8
-    fi
-fi
diff --git a/setup.cfg b/setup.cfg
index 7e3c2de..68882bc 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -4,9 +4,10 @@
 version = 2014.1.1
 summary = OpenStack Gceapi Service
 description-file = README.rst
+license = Apache License, Version 2.0
 author = OpenStack
 author-email = openstack-dev@lists.openstack.org
-home-page = http://www.openstack.org/
+home-page = https://launchpad.net/gce-api
 classifier =
     Environment :: OpenStack
    Intended Audience :: Information Technology
@@ -14,21 +15,20 @@ classifier =
     License :: OSI Approved :: Apache Software License
     Operating System :: POSIX :: Linux
     Programming Language :: Python
-    Programming Language :: Python :: 2
-    Programming Language :: Python :: 2.7
-    Programming Language :: Python :: 2.6

 [files]
 packages =
     gceapi
-scripts =
-    bin/gceapi-db-setup

 [global]
 setup-hooks =
     pbr.hooks.setup_hook

 [entry_points]
+oslo.config.opts =
+    gceapi = gceapi.opts:list_opts
+    gceapi.api = gceapi.api.opts:list_opts
+
 console_scripts =
     gce-api=gceapi.cmd.api:main
     gce-api-manage=gceapi.cmd.manage:main
@@ -56,14 +56,3 @@ input_file = gceapi/locale/gceapi.pot
 keywords = _ gettext ngettext l_ lazy_gettext
 mapping_file = babel.cfg
 output_file = gceapi/locale/gceapi.pot
-
-[nosetests]
-# NOTE(jkoelker) To run the test suite under nose install the following
-# coverage http://pypi.python.org/pypi/coverage
-# tissue http://pypi.python.org/pypi/tissue (pep8 checker)
-# openstack-nose https://github.com/jkoelker/openstack-nose
-verbosity=2
-tests=gceapi/tests
-cover-package = gceapi
-cover-html = true
-cover-erase = true
diff --git a/setup.py b/setup.py
index 70c2b3f..7363757 100644
--- a/setup.py
+++ b/setup.py
@@ -17,6 +17,14 @@
 # THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
 import setuptools

+# In python < 2.7.4, a lazy loading of package `pbr` will break
+# setuptools if some other modules registered functions in `atexit`.
+# solution from: http://bugs.python.org/issue15881#msg170215
+try:
+    import multiprocessing  # noqa
+except ImportError:
+    pass
+
 setuptools.setup(
     setup_requires=['pbr'],
     pbr=True)
diff --git a/test-requirements.txt b/test-requirements.txt
index ae1b408..5e8a1d6 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -2,12 +2,13 @@
 coverage>=3.6
 discover
 feedparser
 fixtures>=0.3.14
-hacking>=0.8.0,<0.9
+hacking<0.11,>=0.10.2
 mox>=0.5.3
 mock>=1.0
 oslo.sphinx
 pylint==0.25.2
 python-subunit>=0.0.18
-sphinx>=1.1.2,<1.2
-testrepository>=0.0.17
+sphinx!=1.2.0,!=1.3b1,<1.3,>=1.1.2
+tempest-lib>=0.6.1
+testrepository>=0.0.18
 testtools>=0.9.34
diff --git a/bin/gceapi-db-setup b/tools/db/gceapi-db-setup
similarity index 100%
rename from bin/gceapi-db-setup
rename to tools/db/gceapi-db-setup
diff --git a/tools/lintstack.py b/tools/lintstack.py
deleted file mode 100755
index 15dda49..0000000
--- a/tools/lintstack.py
+++ /dev/null
@@ -1,199 +0,0 @@
-#!/usr/bin/env python
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright (c) 2012, AT&T Labs, Yun Mao
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""pylint error checking."""
-
-import cStringIO as StringIO
-import json
-import re
-import sys
-
-from pylint import lint
-from pylint.reporters import text
-
-# Note(maoy): E1103 is error code related to partial type inference
-ignore_codes = ["E1103"]
-# Note(maoy): the error message is the pattern of E0202. It should be ignored
-# for gceapi.tests modules
-ignore_messages = ["An attribute affected in gceapi.tests"]
-# Note(maoy): we ignore all errors in openstack.common because it should be
-# checked elsewhere. We also ignore gceapi.tests for now due to high false
-# positive rate.
-ignore_modules = ["gceapi/openstack/common/", "gceapi/tests/"]
-
-KNOWN_PYLINT_EXCEPTIONS_FILE = "tools/pylint_exceptions"
-
-
-class LintOutput(object):
-
-    _cached_filename = None
-    _cached_content = None
-
-    def __init__(self, filename, lineno, line_content, code, message,
-                 lintoutput):
-        self.filename = filename
-        self.lineno = lineno
-        self.line_content = line_content
-        self.code = code
-        self.message = message
-        self.lintoutput = lintoutput
-
-    @classmethod
-    def from_line(cls, line):
-        m = re.search(r"(\S+):(\d+): \[(\S+)(, \S+)?] (.*)", line)
-        matched = m.groups()
-        filename, lineno, code, message = (matched[0], int(matched[1]),
-                                           matched[2], matched[-1])
-        if cls._cached_filename != filename:
-            with open(filename) as f:
-                cls._cached_content = list(f.readlines())
-                cls._cached_filename = filename
-        line_content = cls._cached_content[lineno - 1].rstrip()
-        return cls(filename, lineno, line_content, code, message,
-                   line.rstrip())
-
-    @classmethod
-    def from_msg_to_dict(cls, msg):
-        """From the output of pylint msg, to a dict, where each key
-        is a unique error identifier, value is a list of LintOutput
-        """
-        result = {}
-        for line in msg.splitlines():
-            obj = cls.from_line(line)
-            if obj.is_ignored():
-                continue
-            key = obj.key()
-            if key not in result:
-                result[key] = []
-            result[key].append(obj)
-        return result
-
-    def is_ignored(self):
-        if self.code in ignore_codes:
-            return True
-        if any(self.filename.startswith(name) for name in ignore_modules):
-            return True
-        if any(msg in self.message for msg in ignore_messages):
-            return True
-        return False
-
-    def key(self):
-        if self.code in ["E1101", "E1103"]:
-            # These two types of errors are like Foo class has no member bar.
-            # We discard the source code so that the error will be ignored
-            # next time another Foo.bar is encountered.
- return self.message, "" - return self.message, self.line_content.strip() - - def json(self): - return json.dumps(self.__dict__) - - def review_str(self): - return ("File %(filename)s\nLine %(lineno)d:%(line_content)s\n" - "%(code)s: %(message)s" % self.__dict__) - - -class ErrorKeys(object): - - @classmethod - def print_json(cls, errors, output=sys.stdout): - print >>output, "# automatically generated by tools/lintstack.py" - for i in sorted(errors.keys()): - print >>output, json.dumps(i) - - @classmethod - def from_file(cls, filename): - keys = set() - for line in open(filename): - if line and line[0] != "#": - d = json.loads(line) - keys.add(tuple(d)) - return keys - - -def run_pylint(): - buff = StringIO.StringIO() - reporter = text.ParseableTextReporter(output=buff) - args = ["--include-ids=y", "-E", "gceapi"] - lint.Run(args, reporter=reporter, exit=False) - val = buff.getvalue() - buff.close() - return val - - -def generate_error_keys(msg=None): - print "Generating", KNOWN_PYLINT_EXCEPTIONS_FILE - if msg is None: - msg = run_pylint() - errors = LintOutput.from_msg_to_dict(msg) - with open(KNOWN_PYLINT_EXCEPTIONS_FILE, "w") as f: - ErrorKeys.print_json(errors, output=f) - - -def validate(newmsg=None): - print "Loading", KNOWN_PYLINT_EXCEPTIONS_FILE - known = ErrorKeys.from_file(KNOWN_PYLINT_EXCEPTIONS_FILE) - if newmsg is None: - print "Running pylint. Be patient..." - newmsg = run_pylint() - errors = LintOutput.from_msg_to_dict(newmsg) - - print "Unique errors reported by pylint: was %d, now %d." \ - % (len(known), len(errors)) - passed = True - for err_key, err_list in errors.items(): - for err in err_list: - if err_key not in known: - print err.lintoutput - print - passed = False - if passed: - print "Congrats! pylint check passed." - redundant = known - set(errors.keys()) - if redundant: - print "Extra credit: some known pylint exceptions disappeared." - for i in sorted(redundant): - print json.dumps(i) - print "Consider regenerating the exception file if you will." - else: - print ("Please fix the errors above. If you believe they are false" - " positives, run 'tools/lintstack.py generate' to overwrite.") - sys.exit(1) - - -def usage(): - print """Usage: tools/lintstack.py [generate|validate] - To generate pylint_exceptions file: tools/lintstack.py generate - To validate the current commit: tools/lintstack.py - """ - - -def main(): - option = "validate" - if len(sys.argv) > 1: - option = sys.argv[1] - if option == "generate": - generate_error_keys() - elif option == "validate": - validate() - else: - usage() - - -if __name__ == "__main__": - main() diff --git a/tools/lintstack.sh b/tools/lintstack.sh deleted file mode 100755 index d8591d0..0000000 --- a/tools/lintstack.sh +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (c) 2012-2013, AT&T Labs, Yun Mao -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# Use lintstack.py to compare pylint errors. -# We run pylint twice, once on HEAD, once on the code before the latest -# commit for review. 
-set -e
-TOOLS_DIR=$(cd $(dirname "$0") && pwd)
-# Get the current branch name.
-GITHEAD=`git rev-parse --abbrev-ref HEAD`
-if [[ "$GITHEAD" == "HEAD" ]]; then
-    # In detached head mode, get revision number instead
-    GITHEAD=`git rev-parse HEAD`
-    echo "Currently we are at commit $GITHEAD"
-else
-    echo "Currently we are at branch $GITHEAD"
-fi
-
-cp -f $TOOLS_DIR/lintstack.py $TOOLS_DIR/lintstack.head.py
-
-if git rev-parse HEAD^2 2>/dev/null; then
-    # The HEAD is a Merge commit. Here, the patch to review is
-    # HEAD^2, the master branch is at HEAD^1, and the patch was
-    # written based on HEAD^2~1.
-    PREV_COMMIT=`git rev-parse HEAD^2~1`
-    git checkout HEAD~1
-    # The git merge is necessary for reviews with a series of patches.
-    # If not, this is a no-op so won't hurt either.
-    git merge $PREV_COMMIT
-else
-    # The HEAD is not a merge commit. This won't happen on gerrit.
-    # Most likely you are running against your own patch locally.
-    # We assume the patch to examine is HEAD, and we compare it against
-    # HEAD~1
-    git checkout HEAD~1
-fi
-
-# First generate tools/pylint_exceptions from HEAD~1
-$TOOLS_DIR/lintstack.head.py generate
-# Then use that as a reference to compare against HEAD
-git checkout $GITHEAD
-$TOOLS_DIR/lintstack.head.py
-echo "Check passed. FYI: the pylint exceptions are:"
-cat $TOOLS_DIR/pylint_exceptions
-
diff --git a/tox.ini b/tox.ini
index be092c4..0e4d83a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,14 +1,12 @@
 [tox]
 minversion = 1.6
-envlist = py26,py27,py33,py34,pep8
+envlist = py27,py33,py34,pep8
 skipsdist = True

 [testenv]
-sitepackages = True
+sitepackages = False
 usedevelop = True
-install_command = pip install -U {opts} {packages}
-# Note the hash seed is set to 0 until gce-api can be tested with a
-# random hash seed successfully.
+install_command = pip install -U --force-reinstall {opts} {packages}
 setenv = VIRTUAL_ENV={envdir}
          LANG=en_US.UTF-8
          LANGUAGE=en_US:en
@@ -21,7 +19,7 @@
 commands = python setup.py testr --slowest --testr-args='{posargs}'

 [tox:jenkins]
-sitepackages = True
+sitepackages = False
 downloadcache = ~/cache/pip

 [testenv:pep8]
@@ -29,8 +27,8 @@
 sitepackages = False
 commands = flake8 {posargs}

-[testenv:pylint]
-commands = bash tools/lintstack.sh
+[testenv:genconfig]
+commands = oslo-config-generator --config-file=etc/gceapi/gceapi-config-generator.conf

 [testenv:cover]
 # Also do not run test_coverage_ext tests while gathering coverage as those
@@ -50,8 +48,9 @@ commands = {posargs}
 # TODO Hacking 0.6 checks to fix
 # H102 Apache 2.0 license header not found
-ignore = E121,E122,E123,E124,E126,E127,E128,E711,E712,H102,H303,H404,F403,F811,F841,H803
+ignore = E121,E122,E123,E124,E126,E127,E128,E711,E712,H102,H303,H404,F403,F811,F841,H803,E131,E265,H236,H405,H501
 exclude = .venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build,tools
+max-complexity=25

 [hacking]
-import_exceptions = gceapi.openstack.common.gettextutils._
+import_exceptions = gceapi.i18n
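
The common thread in the hunks above is the move from the copied oslo-incubator code under gceapi/openstack/common to the released oslo.* libraries (oslo_config, oslo_log, oslo_serialization, oslo_utils). For reviewers, here is a minimal sketch of the bootstrap sequence the new imports imply; it uses only public oslo_config/oslo_log calls, and the main() wiring is illustrative rather than code taken from this patch:

    # Illustrative sketch, not part of the patch: minimal oslo_config +
    # oslo_log bootstrap matching the imports introduced above.
    import sys

    from oslo_config import cfg
    from oslo_log import log as logging

    CONF = cfg.CONF


    def main():
        # Logging options must be registered before the config is parsed.
        logging.register_options(CONF)
        CONF(sys.argv[1:], project='gceapi')
        # setup() consumes the registered options (debug, log file, etc.).
        logging.setup(CONF, 'gceapi')
        log = logging.getLogger(__name__)
        log.info('gceapi logging configured')


    if __name__ == '__main__':
        main()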
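With the genconfig environment pointed at etc/gceapi/gceapi-config-generator.conf (the file this patch adds), the sample configuration should be reproducible with `tox -e genconfig`; oslo-config-generator is expected to draw on the oslo.config.opts entry points registered in setup.cfg above.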