From 3149d560c11579570cb2f986bdce92251f2d3684 Mon Sep 17 00:00:00 2001
From: Lingxian Kong
Date: Tue, 9 Feb 2016 14:11:23 +1300
Subject: [PATCH] Sync distil project structure with internal one

The commit history will be lost during this sync, so it is dumped below:

db927106e7362f25384b1e7113d92538ba100f38 Remove residual oerplib dependency
42d8a04a8c66299c4c4c58643c89f043ee3b3f13 Fix import error
202937f0d8b4f3dd5f18e989a6e2dfb3a633fa8c Switch oerplib to odoorpc
2e307f84e5f8e93da67fd4c32683c246237c864d Add update-quote function
0c9d8c29ae112f9758347eba153e1fb5fed8b482 Set the correct sales team
df26990838c0d7b0c6721825989706d07747e92a Support traffic billing for odoo script
2c75433a29fd5c42a2a29b41617b5fa4e42103b4 Add retry support and audit
97aadd2179496255c06f5c1c2a9368500ea74588 packaging: python-distil depends on client
021ebf0e21a6e42ed853b507fb4f16daaad14b49 Bump changelog to 0.5.10
604b906afac25c9c64862799a0d12565db2064a5 Fix network service tranform issue
956c8368435bd9ff3bba45bdde18fb72102a9d5c Skip traffic data
a404539dd7d0fa2e19fd2f5536bbd0c18b3bc468 Prep for Odoo migration
f10743440375cdc5512d0c4fff8b239cbdf42e0d Fix changes for Debian packaging
b76536576e63848aeaa29326a0bac2cf8092e935 Bump version number and add myself to uploaders
7af68dcc4b521e5851a5f18cc647f4312bfd7fa4 fixing an error if counter volume is None
f7ec36305e174cd3b1c77cf96fd9025e7774c420 Fix the multithread issue of strptime
1b16e9bfb4d5e97bd071569d3bca864d6b236cca add debian packaging
9149898015c8d4f859b316a85608bea905a3ca62 Support dynamic source
4dd95ba8be2d8d8a58bb0d6a0ba80cadfb54dd75 dropping near zero entries
766bee76234ffcf71cd0cdd13e1e3f677389390e Merge multi region in odoo-glue
eddc940dbe9cdbcdf9109e3553ffe65ad80c1496 Make it free for one network and one router
2bb70fe2ecddde3b5db1fa64fc1416bc8835ee93 Don't read rates file everytime
accf74e36519a116acf909cab85cd070dba998a0 version bump
7035605a9ff1e42a1120ee438dfe6de413bf2b76 fetch resource metadata in bulk when handling get_usage/get_rated.
5c6f7c0831b9560d85ce33e690c6d299470b7de8 version bump to 0.5.2
5a6233e18aa0e79bf6504f8fbe9bd04d7b4afa39 update rates
6f985422ea42f7ac545ed66e62a18760f14e3913 update odoo glue scripts for distilclient-0.5.1 api break
33c54fe6fc892d4b5621170f5e31ee9df63ed287 Tweak distilclient packaging to not require custom PYTHONPATH
dc22431354ff96b02fb4def6b010575cc1f1be72 get rid of product-name-in-line-description kludge
10925492548183749cac77202d71e38291bb5440 setting up memcache
1e106cf1e012102a3cd49af543401826e67eb6a9 version bump to 0.5.0
ac29b39c629b466dcf2e04655b0491d5eb00040b Second uptime transformer for instance metric
10e37ad6b4c222efbfe0f88261ecd4fb89df93e4 adding another state to billable states
0e647ead7f6a990d279211caf2993f963372032d client: Rename some functions for clarity
7280ed585b0524c4376ef89116210f7c68ae94e4 Enable the tox for Distil
6daafca0a9f0b2a8688b9906fbea320eb4d40b2f Removing sales-order functionality from the api
c12a41d5b67eb8b95abcfbc2d9eeb96cc56e8b2c version bump to 0.4.3
af86e1af31efb14ff6dac39f0c3b642fd4a4e0c4 distil: Region awareness for nova and cinder lookups
bd71c7106093d2a178417f042201e8ee68ac9929 odoo: Add glue script to set current prices in distil
5a8b1596f4f08c2ea060d0da9dac844d6ae8039d odoo: Make region-aware
540eb5234f139bf9c51eb60571a81bda25cd9dd7 Import odoo-glue tree into odoo/
8ed7c74f91d2ffd6c0b1ac50274e238f9e020502 Minor fix to tenant_validation
fed3c8d4d86e09750ac3fa324bded977abe698f5 Require admin or owner
bffebdf974f7e1aad210ff99040424a8f2da19f2 adding missing package depends for cinderclient
6198f1ec1fdbe0390a43571320159317e7dffa27 add toplevel

Change-Id: I81581ca2aaee006daad2c1071068c651865ebb32
---
 .gitignore | 8 + Makefile | 4 +- bin/distil | 5 +- client.mk | 9 +- client/client.py | 70 +- client/shell.py | 74 +- debian/.gitignore | 7 + debian/changelog | 57 ++ debian/compat | 1 + debian/control | 43 ++ debian/pydist-overrides | 2 + debian/python-distil.install | 2 + debian/python-distilclient.install | 2 + debian/rules | 7 + debian/source/format | 1 + debian/source/options | 1 + distil/NoPickle.py | 16 + distil/api/helpers.py | 31 +- distil/api/web.py | 410 +++++----- distil/config.py | 9 + distil/constants.py | 2 +- distil/database.py | 10 +- distil/helpers.py | 10 +- distil/tests/__init__.py | 0 distil/tests/unit/__init__.py | 0 .../tests/unit}/data/ceilometer_json.py | 0 .../tests/unit}/data/map_fixture_0.json | 0 .../tests/unit}/data/map_fixture_1.json | 0 .../tests/unit}/data/map_fixture_10.json | 0 .../tests/unit}/data/map_fixture_100.json | 0 .../tests/unit}/data/map_fixture_101.json | 0 .../tests/unit}/data/map_fixture_102.json | 0 .../tests/unit}/data/map_fixture_103.json | 0 .../tests/unit}/data/map_fixture_104.json | 0 .../tests/unit}/data/map_fixture_105.json | 0 .../tests/unit}/data/map_fixture_106.json | 0 .../tests/unit}/data/map_fixture_107.json | 0 .../tests/unit}/data/map_fixture_11.json | 0 .../tests/unit}/data/map_fixture_12.json | 0 .../tests/unit}/data/map_fixture_13.json | 0 .../tests/unit}/data/map_fixture_14.json | 0 .../tests/unit}/data/map_fixture_15.json | 0 .../tests/unit}/data/map_fixture_16.json | 0 .../tests/unit}/data/map_fixture_17.json | 0 .../tests/unit}/data/map_fixture_18.json | 0 .../tests/unit}/data/map_fixture_19.json | 0 .../tests/unit}/data/map_fixture_2.json | 0 .../tests/unit}/data/map_fixture_20.json | 0 .../tests/unit}/data/map_fixture_21.json | 0 .../tests/unit}/data/map_fixture_22.json | 0 .../tests/unit}/data/map_fixture_23.json | 0 .../tests/unit}/data/map_fixture_24.json | 0
.../tests/unit}/data/map_fixture_25.json | 0 .../tests/unit}/data/map_fixture_26.json | 0 .../tests/unit}/data/map_fixture_27.json | 0 .../tests/unit}/data/map_fixture_28.json | 0 .../tests/unit}/data/map_fixture_29.json | 0 .../tests/unit}/data/map_fixture_3.json | 0 .../tests/unit}/data/map_fixture_30.json | 0 .../tests/unit}/data/map_fixture_31.json | 0 .../tests/unit}/data/map_fixture_32.json | 0 .../tests/unit}/data/map_fixture_33.json | 0 .../tests/unit}/data/map_fixture_34.json | 0 .../tests/unit}/data/map_fixture_35.json | 0 .../tests/unit}/data/map_fixture_36.json | 0 .../tests/unit}/data/map_fixture_37.json | 0 .../tests/unit}/data/map_fixture_38.json | 0 .../tests/unit}/data/map_fixture_39.json | 0 .../tests/unit}/data/map_fixture_4.json | 0 .../tests/unit}/data/map_fixture_40.json | 0 .../tests/unit}/data/map_fixture_41.json | 0 .../tests/unit}/data/map_fixture_42.json | 0 .../tests/unit}/data/map_fixture_43.json | 0 .../tests/unit}/data/map_fixture_44.json | 0 .../tests/unit}/data/map_fixture_45.json | 0 .../tests/unit}/data/map_fixture_46.json | 0 .../tests/unit}/data/map_fixture_47.json | 0 .../tests/unit}/data/map_fixture_48.json | 0 .../tests/unit}/data/map_fixture_49.json | 0 .../tests/unit}/data/map_fixture_5.json | 0 .../tests/unit}/data/map_fixture_50.json | 0 .../tests/unit}/data/map_fixture_51.json | 0 .../tests/unit}/data/map_fixture_52.json | 0 .../tests/unit}/data/map_fixture_53.json | 0 .../tests/unit}/data/map_fixture_54.json | 0 .../tests/unit}/data/map_fixture_55.json | 0 .../tests/unit}/data/map_fixture_56.json | 0 .../tests/unit}/data/map_fixture_57.json | 0 .../tests/unit}/data/map_fixture_58.json | 0 .../tests/unit}/data/map_fixture_59.json | 0 .../tests/unit}/data/map_fixture_6.json | 0 .../tests/unit}/data/map_fixture_60.json | 0 .../tests/unit}/data/map_fixture_61.json | 0 .../tests/unit}/data/map_fixture_62.json | 0 .../tests/unit}/data/map_fixture_63.json | 0 .../tests/unit}/data/map_fixture_64.json | 0 .../tests/unit}/data/map_fixture_65.json | 0 .../tests/unit}/data/map_fixture_66.json | 0 .../tests/unit}/data/map_fixture_67.json | 0 .../tests/unit}/data/map_fixture_68.json | 0 .../tests/unit}/data/map_fixture_69.json | 0 .../tests/unit}/data/map_fixture_7.json | 0 .../tests/unit}/data/map_fixture_70.json | 0 .../tests/unit}/data/map_fixture_71.json | 0 .../tests/unit}/data/map_fixture_72.json | 0 .../tests/unit}/data/map_fixture_73.json | 0 .../tests/unit}/data/map_fixture_74.json | 0 .../tests/unit}/data/map_fixture_75.json | 0 .../tests/unit}/data/map_fixture_76.json | 0 .../tests/unit}/data/map_fixture_77.json | 0 .../tests/unit}/data/map_fixture_78.json | 0 .../tests/unit}/data/map_fixture_79.json | 0 .../tests/unit}/data/map_fixture_8.json | 0 .../tests/unit}/data/map_fixture_80.json | 0 .../tests/unit}/data/map_fixture_81.json | 0 .../tests/unit}/data/map_fixture_82.json | 0 .../tests/unit}/data/map_fixture_83.json | 0 .../tests/unit}/data/map_fixture_84.json | 0 .../tests/unit}/data/map_fixture_85.json | 0 .../tests/unit}/data/map_fixture_86.json | 0 .../tests/unit}/data/map_fixture_87.json | 0 .../tests/unit}/data/map_fixture_88.json | 0 .../tests/unit}/data/map_fixture_89.json | 0 .../tests/unit}/data/map_fixture_9.json | 0 .../tests/unit}/data/map_fixture_90.json | 0 .../tests/unit}/data/map_fixture_91.json | 0 .../tests/unit}/data/map_fixture_92.json | 0 .../tests/unit}/data/map_fixture_93.json | 0 .../tests/unit}/data/map_fixture_94.json | 0 .../tests/unit}/data/map_fixture_95.json | 0 .../tests/unit}/data/map_fixture_96.json | 0 
.../tests/unit}/data/map_fixture_97.json | 0 .../tests/unit}/data/map_fixture_98.json | 0 .../tests/unit}/data/map_fixture_99.json | 0 .../tests/unit}/data/resources.json | 0 {tests => distil/tests/unit}/data_samples.py | 0 {tests => distil/tests/unit}/test_api.py | 156 ++-- .../tests/unit/test_database.py | 6 +- .../tests/unit}/test_interface.py | 25 +- {tests => distil/tests/unit}/test_models.py | 90 +-- distil/tests/unit/test_transformers.py | 691 +++++++++++++++++ .../helpers.py => distil/tests/unit/utils.py | 53 +- distil/transformers.py | 98 ++- examples/conf.yaml | 58 +- examples/real_rates.csv | 38 +- odoo/.gitignore | 2 + odoo/README | 46 ++ odoo/glue.ini.example | 8 + odoo/odoo-glue.py | 698 ++++++++++++++++++ odoo/odoo-products-snapshot.py | 75 ++ odoo/requirements.txt | 3 + requirements.txt | 14 +- setup.cfg | 33 + setup.py | 40 +- test-requirements.txt | 13 + tests/__init__.py | 54 -- tests/constants.py | 66 -- tests/test_transformers.py | 352 --------- tox.ini | 49 ++ 159 files changed, 2413 insertions(+), 1036 deletions(-) create mode 100644 debian/.gitignore create mode 100644 debian/changelog create mode 100644 debian/compat create mode 100644 debian/control create mode 100644 debian/pydist-overrides create mode 100644 debian/python-distil.install create mode 100644 debian/python-distilclient.install create mode 100755 debian/rules create mode 100644 debian/source/format create mode 100644 debian/source/options create mode 100644 distil/NoPickle.py create mode 100644 distil/tests/__init__.py create mode 100644 distil/tests/unit/__init__.py rename {tests => distil/tests/unit}/data/ceilometer_json.py (100%) rename {tests => distil/tests/unit}/data/map_fixture_0.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_1.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_10.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_100.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_101.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_102.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_103.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_104.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_105.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_106.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_107.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_11.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_12.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_13.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_14.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_15.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_16.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_17.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_18.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_19.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_2.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_20.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_21.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_22.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_23.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_24.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_25.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_26.json 
(100%) rename {tests => distil/tests/unit}/data/map_fixture_27.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_28.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_29.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_3.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_30.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_31.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_32.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_33.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_34.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_35.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_36.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_37.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_38.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_39.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_4.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_40.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_41.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_42.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_43.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_44.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_45.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_46.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_47.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_48.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_49.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_5.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_50.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_51.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_52.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_53.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_54.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_55.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_56.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_57.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_58.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_59.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_6.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_60.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_61.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_62.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_63.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_64.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_65.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_66.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_67.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_68.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_69.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_7.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_70.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_71.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_72.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_73.json (100%) rename {tests => 
distil/tests/unit}/data/map_fixture_74.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_75.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_76.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_77.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_78.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_79.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_8.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_80.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_81.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_82.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_83.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_84.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_85.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_86.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_87.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_88.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_89.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_9.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_90.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_91.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_92.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_93.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_94.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_95.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_96.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_97.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_98.json (100%) rename {tests => distil/tests/unit}/data/map_fixture_99.json (100%) rename {tests => distil/tests/unit}/data/resources.json (100%) rename {tests => distil/tests/unit}/data_samples.py (100%) rename {tests => distil/tests/unit}/test_api.py (59%) rename tests/test_database_module.py => distil/tests/unit/test_database.py (86%) rename {tests => distil/tests/unit}/test_interface.py (70%) rename {tests => distil/tests/unit}/test_models.py (58%) create mode 100644 distil/tests/unit/test_transformers.py rename tests/helpers.py => distil/tests/unit/utils.py (59%) create mode 100644 odoo/.gitignore create mode 100644 odoo/README create mode 100644 odoo/glue.ini.example create mode 100755 odoo/odoo-glue.py create mode 100755 odoo/odoo-products-snapshot.py create mode 100644 odoo/requirements.txt create mode 100644 setup.cfg create mode 100644 test-requirements.txt delete mode 100644 tests/__init__.py delete mode 100644 tests/constants.py delete mode 100644 tests/test_transformers.py create mode 100644 tox.ini diff --git a/.gitignore b/.gitignore index ebaa148..8c2d38c 100644 --- a/.gitignore +++ b/.gitignore @@ -32,7 +32,15 @@ bin/pybabel bin/python bin/python2 bin/python2.7 +.project +.pydevproject +.testrepository/ +.tox/ +.venv/ +bin/logs/ bin/waitress-serve local/ test_vm/ env/ +ChangeLog +.idea diff --git a/Makefile b/Makefile index ea45dad..6bed818 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,5 @@ -VERSION=0.4.2 +VERSION=0.5.3 NAME=distil INSTALL_PATH=/opt/stack/distil BINARY_PATH=/opt/stack/distil @@ -41,6 +41,7 @@ deb: clean init --depends python-virtualenv \ --depends python-sqlalchemy \ --depends python-keystoneclient \ + --depends python-cinderclient \ --depends python-requests \ --depends python-flask \ 
--depends python-novaclient \ @@ -48,6 +49,7 @@ deb: clean init --depends python-mysqldb \ --depends python-psycopg2 \ --depends python-yaml \ + --depends python-memcache \ --template-scripts \ --template-value install_path=${INSTALL_PATH} \ -C ${WORK_DIR} \ diff --git a/bin/distil b/bin/distil index d5bc630..9cb96eb 100755 --- a/bin/distil +++ b/bin/distil @@ -1,6 +1,5 @@ #!/bin/bash -DISTILPATH=/opt/stack/distilclient +DISTILPATH=/usr/lib/python2.7/dist-packages/distilclient -export PYTHONPATH=${DISTILPATH}:${PYTHONPATH} -python ${DISTILPATH}/client/shell.py "$@" +python ${DISTILPATH}/shell.py "$@" diff --git a/client.mk b/client.mk index 3ab3b96..ab713ad 100644 --- a/client.mk +++ b/client.mk @@ -1,7 +1,7 @@ -VERSION=0.4.2 +VERSION=0.5.3 NAME=distilclient -INSTALL_PATH=/opt/stack/distilclient +INSTALL_PATH=/usr/lib/python2.7/dist-packages/distilclient BINARY_PATH=/usr/local/bin WORK_DIR=./work-client @@ -18,9 +18,10 @@ init: @mkdir -p ${WORK_DIR}${BINARY_PATH} deb: clean init - @mkdir -p ${WORK_DIR}${INSTALL_PATH}/client + @mkdir -p ${WORK_DIR}${INSTALL_PATH} @cp ./bin/distil ${WORK_DIR}${BINARY_PATH}/distil - @cp -r ./client/*.py ${WORK_DIR}${INSTALL_PATH}/client/ + @cp -r ./client/*.py ${WORK_DIR}${INSTALL_PATH}/ + @cp __init__.py ${WORK_DIR}${INSTALL_PATH}/ @chmod 0755 ${WORK_DIR}${BINARY_PATH}/distil @fpm -s dir -t deb -n ${NAME} -v ${VERSION} \ --depends python2.7 \ diff --git a/client/client.py b/client/client.py index df6c8f7..5278477 100644 --- a/client/client.py +++ b/client/client.py @@ -16,7 +16,6 @@ import requests from keystoneclient.v2_0.client import Client as Keystone from requests.exceptions import ConnectionError from urlparse import urljoin -import json class Client(object): @@ -55,7 +54,7 @@ class Client(object): endpoint_type=os_endpoint_type ) - def usage(self): + def collect_usage(self): url = urljoin(self.endpoint, "collect_usage") headers = {"Content-Type": "application/json", @@ -90,17 +89,20 @@ class Client(object): print e def get_usage(self, tenant, start, end): - url = urljoin(self.endpoint, "get_usage") + return self._query_usage(tenant, start, end, "get_usage") - headers = { - "X-Auth-Token": self.auth_token - } + def get_rated(self, tenant, start, end): + return self._query_usage(tenant, start, end, "get_rated") - params = { - "tenant": tenant, - "start": start, - "end": end - } + def _query_usage(self, tenant, start, end, endpoint): + url = urljoin(self.endpoint, endpoint) + + headers = {"X-Auth-Token": self.auth_token} + + params = {"tenant": tenant, + "start": start, + "end": end + } try: response = requests.get(url, headers=headers, @@ -108,52 +110,8 @@ class Client(object): verify=not self.insecure) if response.status_code != 200: raise AttributeError("Get usage failed: %s code: %s" % - (response.text, response.status_code)) + (response.text, response.status_code)) else: return response.json() except ConnectionError as e: print e - - def _sales_order_query(self, tenants, relative_url, make_data): - url = urljoin(self.endpoint, relative_url) - - headers = {"Content-Type": "application/json", - "X-Auth-Token": self.auth_token} - - tenants_resp = {'sales_orders': [], 'errors': {}} - for tenant in tenants: - data = make_data(tenant) - try: - response = requests.post(url, headers=headers, - data=json.dumps(data), - verify=not self.insecure) - if response.status_code != 200: - error = ("Sales order cycle failed: %s Code: %s" % - (response.text, response.status_code)) - tenants_resp['errors'][tenant] = error - else: - 
tenants_resp['sales_orders'].append(response.json()) - except ConnectionError as e: - print e - return tenants_resp - - def sales_order(self, tenants, end, draft): - return self._sales_order_query( - tenants, - 'sales_draft' if draft else 'sales_order', - lambda tenant: {'tenant': tenant, 'end': end} - ) - - def sales_historic(self, tenants, date): - return self._sales_order_query( - tenants, - 'sales_historic', - lambda tenant: {'tenant': tenant, 'date': date} - ) - - def sales_range(self, tenants, start, end): - return self._sales_order_query( - tenants, - 'sales_range', - lambda tenant: {'tenant': tenant, 'start': start, 'end': end} - ) diff --git a/client/shell.py b/client/shell.py index 41ec8b1..8ae4b24 100644 --- a/client/shell.py +++ b/client/shell.py @@ -110,58 +110,22 @@ if __name__ == '__main__': help="End time", required=True) - sales_parser = subparsers.add_parser( - 'sales-order', - help=('create sales orders for given tenants')) - sales_parser.add_argument( - "-t", "--tenant", dest="tenants", - help='Tenants to create sales orders for.', - action="append", default=[], - required=True) - sales_parser.add_argument( - "-e", "--end", dest="end", - help='end date for sales order.') + get_rated_parser = subparsers.add_parser( + 'get-rated', help=('get rated usage')) - draft_parser = subparsers.add_parser( - 'sales-draft', - help=('create sales drafts for given tenants')) - draft_parser.add_argument( - "-t", "--tenant", dest="tenants", - help='Tenants to create sales drafts for.', - action="append", required=True) - draft_parser.add_argument( - "-e", "--end", dest="end", - help='end date for sales order.') - - historic_parser = subparsers.add_parser( - 'sales-historic', - help=('regenerate historic sales orders for given tenants,' + - 'at given date')) - historic_parser.add_argument( - "-t", "--tenant", dest="tenants", - help='Tenants to create sales drafts for.', - action="append", required=True) - historic_parser.add_argument( - "-d", "--date", dest="date", - help='target search date for sales order.', + get_rated_parser.add_argument( + "-t", "--tenant", dest="tenant", + help='Tenant to get usage for', required=True) - range_parser = subparsers.add_parser( - 'sales-range', - help=('regenerate historic sales orders for given tenants,' + - 'in a given range')) - range_parser.add_argument( - "-t", "--tenant", dest="tenants", - help='Tenants to create sales drafts for.', - action="append", required=True) - range_parser.add_argument( + get_rated_parser.add_argument( "-s", "--start", dest="start", - help='start of range for sales orders.', + help="Start time", required=True) - range_parser.add_argument( + + get_rated_parser.add_argument( "-e", "--end", dest="end", - help='end of range for sales orders. 
Defaults to now.', - default=None) + help="End time") args = parser.parse_args() @@ -202,7 +166,7 @@ if __name__ == '__main__': kwargs.get('os_endpoint_type', None)) if args.command == 'collect-usage': - response = client.usage() + response = client.collect_usage() print json.dumps(response, indent=2) if args.command == 'last-collected': @@ -213,18 +177,6 @@ if __name__ == '__main__': response = client.get_usage(args.tenant, args.start, args.end) print json.dumps(response, indent=2) - if args.command == 'sales-order': - response = client.sales_order(args.tenants, args.end, False) - print json.dumps(response, indent=2) - - if args.command == 'sales-draft': - response = client.sales_order(args.tenants, args.end, True) - print json.dumps(response, indent=2) - - if args.command == 'sales-historic': - response = client.sales_historic(args.tenants, args.date) - print json.dumps(response, indent=2) - - if args.command == 'sales-range': - response = client.sales_range(args.tenants, args.start, args.end) + if args.command == 'get-rated': + response = client.get_rated(args.tenant, args.start, args.end) print json.dumps(response, indent=2) diff --git a/debian/.gitignore b/debian/.gitignore new file mode 100644 index 0000000..138bbaf --- /dev/null +++ b/debian/.gitignore @@ -0,0 +1,7 @@ +files +*.debhelper.log +*.postinst.debhelper +*.prerm.debhelper +*.substvars +python-distil/ +python-distilclient diff --git a/debian/changelog b/debian/changelog new file mode 100644 index 0000000..58fd2f4 --- /dev/null +++ b/debian/changelog @@ -0,0 +1,57 @@ +distil (0.5.11) unstable; urgency=medium + + [Xav Paice] + * Add a versioned recommends for distilclient on distil to keep them + synchronised. + + -- Andrew Ruthven Mon, 07 Sep 2015 22:20:42 +1200 + +distil (0.5.10) unstable; urgency=medium + + * Prep for Odoo migration + * Skip traffic data + * Fix network service transform + + -- Andrew Ruthven Fri, 04 Sep 2015 15:36:15 +1200 + +distil (0.5.9) unstable; urgency=medium + + * Install into /usr/lib, not /usr/usr/lib. + + -- Andrew Ruthven Wed, 29 Jul 2015 22:50:20 +1200 + +distil (0.5.8) unstable; urgency=medium + + * Fix dependencies. + + -- Andrew Ruthven Wed, 29 Jul 2015 00:43:20 +1200 + +distil (0.5.7) unstable; urgency=medium + + * Actually deploy content in python-distil. + + -- Andrew Ruthven Wed, 29 Jul 2015 00:29:27 +1200 + +distil (0.5.6) unstable; urgency=medium + + * Add provides and conflicts fields for old package names. + + -- Andrew Ruthven Tue, 28 Jul 2015 21:14:01 +1200 + +distil (0.5.5) unstable; urgency=medium + + * Bump the version. + + -- Andrew Ruthven Tue, 28 Jul 2015 17:29:00 +1200 + +distil (0.5.4) unstable; urgency=medium + + * Bump the version. 
+ + -- Andrew Ruthven Tue, 28 Jul 2015 11:08:35 +1200 + +distil (0.5.3) unstable; urgency=low + + * source package automatically created by stdeb 0.6.0+git + + -- OpenStack Wed, 01 Jul 2015 11:32:23 +1200 diff --git a/debian/compat b/debian/compat new file mode 100644 index 0000000..7f8f011 --- /dev/null +++ b/debian/compat @@ -0,0 +1 @@ +7 diff --git a/debian/control b/debian/control new file mode 100644 index 0000000..e2bc3f9 --- /dev/null +++ b/debian/control @@ -0,0 +1,43 @@ +Source: distil +Maintainer: OpenStack +Uploaders: Andrew Ruthven +Section: python +Priority: optional +Build-Depends: python-setuptools (>= 0.6b3), python-all (>= 2.6.6-3), debhelper (>= 7), dh-python, python-yaml +Standards-Version: 3.9.1 +X-Python-Version: >= 2.7 + +Package: python-distil +Architecture: all +Depends: + ${misc:Depends}, + ${python:Depends} +Recommends: python-distilclient (= ${binary:Version}) +Provides: distil +Replaces: distil +Conflicts: distil +Description: Distil project + Distil is a web app to provide easy interactions with ERP systems, by + exposing a configurable set of collection tools and transformers to make + usable billing data out of Ceilometer entries. + . + Distil provides a rest api to integrate with arbitrary ERP systems, and + returns sales orders as json. What the ranges are, and how Ceilometer data + is aggregated is intended to be configurable, and defined in the configuration + file. + . + The Distil data store will prevent overlapping bills for a given tenant and + resource ever being stored, while still allowing for regeneration of a given + +Package: python-distilclient +Architecture: all +Depends: ${misc:Depends}, ${python:Depends}, python2.7, python-keystoneclient, python-requests +Provides: distilclient +Replaces: distilclient +Conflicts: distilclient +Description: Client interface for Distil project + Distil is a web app to provide easy interactions with ERP systems, by + exposing a configurable set of collection tools and transformers to make + usable billing data out of Ceilometer entries. + . + This package provides a client to interact with the Distil web app. 
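For context on the client packaged above: after this patch, both get_usage and get_rated reduce to a single GET against the API with an X-Auth-Token header and tenant/start/end parameters, as implemented by Client._query_usage in client/client.py. The following is a minimal sketch of the same call made without distilclient; the endpoint URL, token, tenant ID, and dates are placeholder values, not part of the patch.

import json

import requests
from urlparse import urljoin  # Python 2.7, as in client/client.py

DISTIL_ENDPOINT = "http://localhost:8000/"  # placeholder
AUTH_TOKEN = "example-token"                # placeholder


def query_usage(endpoint, tenant, start, end):
    # Mirrors Client._query_usage: one GET serves both get_usage and
    # get_rated, selected by the endpoint name.
    url = urljoin(DISTIL_ENDPOINT, endpoint)
    headers = {"X-Auth-Token": AUTH_TOKEN}
    params = {"tenant": tenant, "start": start, "end": end}
    response = requests.get(url, headers=headers, params=params)
    if response.status_code != 200:
        raise AttributeError("Get usage failed: %s code: %s" %
                             (response.text, response.status_code))
    return response.json()


# Dates use the iso_date format ("%Y-%m-%d") accepted by the API.
print json.dumps(query_usage("get_rated", "example-tenant-id",
                             "2015-08-01", "2015-09-01"), indent=2)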
diff --git a/debian/pydist-overrides b/debian/pydist-overrides new file mode 100644 index 0000000..f036229 --- /dev/null +++ b/debian/pydist-overrides @@ -0,0 +1,2 @@ +pyaml python-yaml +PyMySQL python-mysqldb diff --git a/debian/python-distil.install b/debian/python-distil.install new file mode 100644 index 0000000..c35996d --- /dev/null +++ b/debian/python-distil.install @@ -0,0 +1,2 @@ +usr/lib /usr +work-api/etc/distil/ /etc diff --git a/debian/python-distilclient.install b/debian/python-distilclient.install new file mode 100644 index 0000000..6df0b62 --- /dev/null +++ b/debian/python-distilclient.install @@ -0,0 +1,2 @@ +bin/distil usr/bin +client/*.py /usr/lib/python2.7/dist-packages/distilclient diff --git a/debian/rules b/debian/rules new file mode 100755 index 0000000..4de318f --- /dev/null +++ b/debian/rules @@ -0,0 +1,7 @@ +#!/usr/bin/make -f + +# This file was automatically generated by stdeb 0.6.0+git at +# Wed, 01 Jul 2015 11:32:23 +1200 + +%: + dh "$@" --with python2 --buildsystem=python_distutils diff --git a/debian/source/format b/debian/source/format new file mode 100644 index 0000000..89ae9db --- /dev/null +++ b/debian/source/format @@ -0,0 +1 @@ +3.0 (native) diff --git a/debian/source/options b/debian/source/options new file mode 100644 index 0000000..4d82e22 --- /dev/null +++ b/debian/source/options @@ -0,0 +1 @@ +extend-diff-ignore="\.egg-info" \ No newline at end of file diff --git a/distil/NoPickle.py b/distil/NoPickle.py new file mode 100644 index 0000000..6e11832 --- /dev/null +++ b/distil/NoPickle.py @@ -0,0 +1,16 @@ + + +class NoPickling(BaseException): + """Should not be pickling""" + + +class NoPickle(object): + + def __init__(self, *args, **kwargs): + pass + + def dump(self, value): + raise NoPickling("Pickling is not allowed!") + + def load(self, value): + raise NoPickling("Unpickling is not allowed!") diff --git a/distil/api/helpers.py b/distil/api/helpers.py index 97ba192..d84f38d 100644 --- a/distil/api/helpers.py +++ b/distil/api/helpers.py @@ -45,9 +45,17 @@ def must(*args, **kwargs): @decorator def returns_json(func, *args, **kwargs): + """Dumps content into a json and makes a response. + NOTE: If content is already a string assumes it is json.""" status, content = func(*args, **kwargs) + + if isinstance(content, str): + content_json = content + else: + content_json = json.dumps(content) + response = flask.make_response( - json.dumps(content), status) + content_json, status) response.headers['Content-type'] = 'application/json' return response @@ -73,7 +81,7 @@ def validate_tenant_id(tenant_id, session): """Tenant ID validation that check that the id you passed is valid, and that a tenant with this ID exists. 
- returns tenant query, or a tuple if validation failure.""" - if isinstance(tenant_id, unicode): + if isinstance(tenant_id, basestring): tenant_query = session.query(Tenant).\ filter(Tenant.id == tenant_id) if tenant_query.count() == 0: @@ -90,6 +98,23 @@ def require_admin(func, *args, **kwargs): if config.auth.get('authenticate_clients'): roles = flask.request.headers['X-Roles'].split(',') if 'admin' not in roles: - return flask.make_response(403, "Must be admin") + return flask.make_response("Must be admin", 403) + + return func(*args, **kwargs) + + +@decorator +def require_admin_or_owner(func, *args, **kwargs): + if config.auth.get('authenticate_clients'): + roles = flask.request.headers['X-Roles'].split(',') + tenant_id = flask.request.headers['X-tenant-id'] + json_tenant_id = (None if not flask.request.json + else flask.request.json['tenant']) + args_tenant_id = flask.request.args.get('tenant') + request_tenant_id = json_tenant_id or args_tenant_id + if 'admin' in roles or tenant_id == request_tenant_id: + return func(*args, **kwargs) + + return flask.make_response("Must be admin or the tenant owner.", 403) return func(*args, **kwargs) diff --git a/distil/api/web.py b/distil/api/web.py index 963a25d..d82a74b 100644 --- a/distil/api/web.py +++ b/distil/api/web.py @@ -13,25 +13,32 @@ # under the License. import flask +import hashlib +import re +from distil.NoPickle import NoPickle from flask import Flask, Blueprint from distil import database, config from distil.constants import iso_time, iso_date, dawn_of_time from distil.transformers import active_transformers as transformers from distil.rates import RatesFile -from distil.models import SalesOrder, _Last_Run +from distil.models import _Last_Run from distil.helpers import convert_to, reset_cache from distil.interface import Interface, timed -from sqlalchemy import create_engine, func +from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, create_session from sqlalchemy.pool import NullPool from sqlalchemy.exc import IntegrityError, OperationalError +# Fix the multithread issue when using strptime, based on this link: +# stackoverflow.com/questions/2427240/thread-safe-equivalent-to-pythons-time-strptime # noqa +import _strptime from datetime import datetime, timedelta from decimal import Decimal import json import logging as log -from keystoneclient.middleware.auth_token import AuthProtocol as KeystoneMiddleware +from keystonemiddleware import auth_token from .helpers import returns_json, json_must, validate_tenant_id, require_admin +from .helpers import require_admin_or_owner from urlparse import urlparse @@ -39,10 +46,18 @@ engine = None Session = None +memcache = None + app = Blueprint("main", __name__) DEFAULT_TIMEZONE = "Pacific/Auckland" +RATES = None + +# Double-confirmed by: +# http://blog.namis.me/2012/02/14/python-strptime-is-not-thread-safe/ dummy_call = datetime.strptime("2011-04-05 18:40:58.525996", + "%Y-%m-%d %H:%M:%S.%f") def get_app(conf): actual_app = Flask(__name__) @@ -65,6 +80,8 @@ def get_app(conf): format='%(asctime)s %(message)s') log.info("Billing API started.") + setup_memcache() + # if configured to authenticate clients, then wrap the # wsgi app in the keystone middleware.
if config.auth.get('authenticate_clients'): @@ -77,11 +94,22 @@ def get_app(conf): 'auth_port': identity_url.port, 'auth_protocol': identity_url.scheme } - actual_app = KeystoneMiddleware(actual_app, conf) + actual_app = auth_token.AuthProtocol(actual_app, conf) return actual_app +def setup_memcache(): + if config.memcache['enabled']: + log.info("Memcache enabled.") + import memcache as memcached + global memcache + memcache = memcached.Client(config.memcache['addresses'], + pickler=NoPickle, unpickler=NoPickle) + else: + log.info("Memcache disabled.") + + @app.route("last_collected", methods=["GET"]) @returns_json @require_admin @@ -116,8 +144,13 @@ def filter_and_group(usage, usage_by_resource): # billing. # if we have a list of trust sources configured, then # discard everything not matching. - if trust_sources and u['source'] not in trust_sources: - log.warning('ignoring untrusted usage sample ' + + # NOTE(flwang): When posting samples by ceilometer REST API, it + # will use the format <tenant_id>:<source>, + # so we need to use a regex to recognize it. + if (trust_sources and + all([not re.match(source, u['source']) + for source in trust_sources]) == True): + log.warning('Ignoring untrusted usage sample ' + 'from source `%s`' % u['source']) continue @@ -127,7 +160,7 @@ def filter_and_group(usage, usage_by_resource): def transform_and_insert(tenant, usage_by_resource, transformer, service, - meter_info, window_start, window_end, + mapping, window_start, window_end, db, timestamp): with timed("apply transformer + insert"): for res, entries in usage_by_resource.items(): @@ -136,14 +169,14 @@ def transform_and_insert(tenant, usage_by_resource, transformer, service, service, entries, window_start, window_end) if transformed: - res = meter_info.get('res_id_template', '%s') % res + res = mapping.get('res_id_template', '%s') % res - md_def = meter_info['metadata'] + md_def = mapping['metadata'] - db.insert_resource(tenant.id, res, meter_info['type'], + db.insert_resource(tenant.id, res, mapping['type'], timestamp, entries[-1], md_def) db.insert_usage(tenant.id, res, transformed, - meter_info['unit'], window_start, + mapping['unit'], window_start, window_end, timestamp) @@ -175,21 +208,21 @@ def collect_usage(tenant, db, session, resp, end): mappings = config.collection['meter_mappings'] - for meter_name, meter_info in mappings.items(): - usage = tenant.usage(meter_name, window_start, window_end) + for mapping in mappings: + usage = tenant.usage(mapping['meter'], window_start, window_end) usage_by_resource = {} - transformer = transformers[meter_info['transformer']]() + transformer = transformers[mapping['transformer']]() filter_and_group(usage, usage_by_resource) - if 'service' in meter_info: - service = meter_info['service'] + if 'service' in mapping: + service = mapping['service'] else: - service = meter_name + service = mapping['meter'] transform_and_insert(tenant, usage_by_resource, - transformer, service, meter_info, + transformer, service, mapping, window_start, window_end, db, timestamp) @@ -271,6 +304,7 @@ def run_usage_collection(): trace = traceback.format_exc() log.critical('Exception escaped! %s \nTrace: \n%s' % (e, trace)) + def make_serializable(obj): if isinstance(obj, list): return [make_serializable(x) for x in obj] @@ -282,9 +316,10 @@ def make_serializable(obj): return obj + @app.route("get_usage", methods=["GET"]) +@require_admin_or_owner @returns_json -@require_admin def get_usage(): """ Get raw aggregated usage for a tenant, in a given timespan.
@@ -320,32 +355,117 @@ def get_usage(): log.info("parameter validation ok") + if memcache is not None: + key = make_key("raw_usage", tenant_id, start, end) + + data = memcache.get(key) + if data is not None: + log.info("Returning memcache raw data for %s in range: %s - %s" % + (tenant_id, start, end)) + return 200, data + + log.info("Calculating raw data for %s in range: %s - %s" % + (tenant_id, start, end)) + # aggregate usage usage = db.usage(start, end, tenant_id) tenant_dict = build_tenant_dict(valid_tenant, usage, db) - return 200, {'usage': make_serializable(tenant_dict)} + response_json = json.dumps({'usage': make_serializable(tenant_dict)}) + + if memcache is not None: + memcache.set(key, response_json) + + return 200, response_json + + +@app.route("get_rated", methods=["GET"]) +@require_admin_or_owner +@returns_json +def get_rated(): + """ + Get rated aggregated usage for a tenant, in a given timespan. + Rates used are those at the 'start' of the timespan. + -tenant_id: tenant to get data for. + -start: a given start for the range. + -end: a given end for the range, defaults to now. + """ + tenant_id = flask.request.args.get('tenant', None) + start = flask.request.args.get('start', None) + end = flask.request.args.get('end', None) + + try: + if start is not None: + try: + start = datetime.strptime(start, iso_date) + except ValueError: + start = datetime.strptime(start, iso_time) + else: + return 400, {"missing parameter": {"start": "start date" + + " in format: y-m-d"}} + if not end: + end = datetime.utcnow() + else: + try: + end = datetime.strptime(end, iso_date) + except ValueError: + end = datetime.strptime(end, iso_time) + except ValueError: + return 400, { + "errors": ["'end' date given needs to be in format: " + + "y-m-d, or y-m-dTH:M:S"]} + + if end <= start: + return 400, {"errors": ["end date must be greater than start."]} + + session = Session() + + valid_tenant = validate_tenant_id(tenant_id, session) + if isinstance(valid_tenant, tuple): + return valid_tenant + + if memcache is not None: + key = make_key("rated_usage", valid_tenant.id, start, end) + + data = memcache.get(key) + if data is not None: + log.info("Returning memcache rated data for %s in range: %s - %s" % + (valid_tenant.id, start, end)) + return 200, data + + log.info("Calculating rated data for %s in range: %s - %s" % + (valid_tenant.id, start, end)) + + tenant_dict = calculate_rated_data(valid_tenant, start, end, session) + + response_json = json.dumps({'usage': tenant_dict}) + + if memcache is not None: + memcache.set(key, response_json) + + return 200, response_json + + +def make_key(api_call, tenant_id, start, end): + call_info = [config.memcache['key_prefix'], api_call, + tenant_id, str(start), str(end)] + return hashlib.sha256(str(call_info)).hexdigest() def build_tenant_dict(tenant, entries, db): """Builds a dict structure for a given tenant.""" - tenant_dict = {'name': tenant.name, 'tenant_id': tenant.id, - 'resources': {}} + tenant_dict = {'name': tenant.name, 'tenant_id': tenant.id} + + all_resource_ids = {entry.resource_id for entry in entries} + tenant_dict['resources'] = db.get_resources(all_resource_ids) for entry in entries: service = {'name': entry.service, 'volume': entry.volume, - 'unit': entry.unit} + 'unit': entry.unit} - if (entry.resource_id not in tenant_dict['resources']): - resource = db.get_resource_metadata(entry.resource_id) - - resource['services'] = [service] - - tenant_dict['resources'][entry.resource_id] = resource - - else: - resource = 
tenant_dict['resources'][entry.resource_id] - resource['services'].append(service) + resource = tenant_dict['resources'][entry.resource_id] + service_list = resource.setdefault('services', []) + service_list.append(service) return tenant_dict @@ -386,226 +506,26 @@ def add_costs_for_tenant(tenant, RatesManager): return tenant -def generate_sales_order(draft, tenant_id, end): - """Generates a sales order dict, and unless draft is true, - creates a database entry for sales_order.""" - session = Session() +def calculate_rated_data(tenant, start, end, session): + """Calculate a rated data dict from the given range.""" + db = database.Database(session) - valid_tenant = validate_tenant_id(tenant_id, session) - if isinstance(valid_tenant, tuple): - return valid_tenant + global RATES + if not RATES: + RATES = RatesFile(config.rates_config) - rates = RatesFile(config.rates_config) - - # Get the last sales order for this tenant, to establish - # the proper ranging - start = session.query(func.max(SalesOrder.end).label('end')).\ - filter(SalesOrder.tenant_id == tenant_id).first().end - if not start: - start = dawn_of_time - - # these coditionals need work, also some way to - # ensure all given timedate values are in UTC? - if end <= start: - return 400, {"errors": ["end date must be greater than " + - "the end of the last sales order range."]} - if end > datetime.utcnow(): - return 400, {"errors": ["end date cannot be a future date."]} - - usage = db.usage(start, end, tenant_id) - - session.begin() - if not draft: - order = SalesOrder(tenant_id=tenant_id, start=start, end=end) - session.add(order) - - try: - # Commit the record before we generate the bill, to mark this as a - # billed region of data. Avoids race conditions by marking a tenant - # BEFORE we start to generate the data for it. - session.commit() - - # Transform the query result into a billable dict. - tenant_dict = build_tenant_dict(valid_tenant, usage, db) - tenant_dict = add_costs_for_tenant(tenant_dict, rates) - - # add sales order range: - tenant_dict['start'] = str(start) - tenant_dict['end'] = str(end) - session.close() - if not draft: - log.info("Sales Order #%s Generated for %s in range: %s - %s" % - (order.id, tenant_id, start, end)) - return 200, tenant_dict - except (IntegrityError, OperationalError): - session.rollback() - session.close() - log.warning("IntegrityError creating sales-order for " + - "%s %s in range: %s - %s " % - (valid_tenant.name, valid_tenant.id, start, end)) - return 400, {"id": tenant_id, - "error": "IntegrityError, existing sales_order overlap."} - - -def regenerate_sales_order(tenant_id, target): - """Finds a sales order entry nearest to the target, - and returns a salesorder dict based on the entry.""" - session = Session() - db = database.Database(session) - rates = RatesFile(config.rates_config) - - valid_tenant = validate_tenant_id(tenant_id, session) - if isinstance(valid_tenant, tuple): - return valid_tenant - - try: - sales_order = db.get_sales_orders(tenant_id, target, target)[0] - except IndexError: - return 400, {"errors": ["Given date not in existing sales orders."]} - - usage = db.usage(sales_order.start, sales_order.end, tenant_id) + usage = db.usage(start, end, tenant.id) # Transform the query result into a billable dict. 
- tenant_dict = build_tenant_dict(valid_tenant, usage, db) - tenant_dict = add_costs_for_tenant(tenant_dict, rates) + tenant_dict = build_tenant_dict(tenant, usage, db) + tenant_dict = add_costs_for_tenant(tenant_dict, RATES) # add sales order range: - tenant_dict['start'] = str(sales_order.start) - tenant_dict['end'] = str(sales_order.end) + tenant_dict['start'] = str(start) + tenant_dict['end'] = str(end) - return 200, tenant_dict - - -def regenerate_sales_order_range(tenant_id, start, end): - """For all sales orders in a given range, generate sales order dicts, - and return them.""" - session = Session() - db = database.Database(session) - rates = RatesFile(config.rates_config) - - valid_tenant = validate_tenant_id(tenant_id, session) - if isinstance(valid_tenant, tuple): - return valid_tenant - - sales_orders = db.get_sales_orders(tenant_id, start, end) - - tenants = [] - for sales_order in sales_orders: - usage = db.usage(sales_order.start, sales_order.end, tenant_id) - - # Transform the query result into a billable dict. - tenant_dict = build_tenant_dict(valid_tenant, usage, db) - tenant_dict = add_costs_for_tenant(tenant_dict, rates) - - # add sales order range: - tenant_dict['start'] = str(sales_order.start) - tenant_dict['end'] = str(sales_order.end) - - tenants.append(tenant_dict) - - return 200, tenants - - -@app.route("sales_order", methods=["POST"]) -@require_admin -@json_must() -@returns_json -def run_sales_order_generation(): - """Generates a sales order for the given tenant. - -end: a given end date, or uses default""" - tenant_id = flask.request.json.get("tenant", None) - end = flask.request.json.get("end", None) - if not end: - # Today, the beginning of. - end = datetime.utcnow().\ - replace(hour=0, minute=0, second=0, microsecond=0) - else: - try: - end = datetime.strptime(end, iso_date) - except ValueError: - return 400, {"errors": ["'end' date given needs to be in format:" + - " y-m-d"]} - - return generate_sales_order(False, tenant_id, end) - - -@app.route("sales_draft", methods=["POST"]) -@require_admin -@json_must() -@returns_json -def run_sales_draft_generation(): - """Generates a sales draft for the given tenant. - -end: a given end datetime, or uses default""" - tenant_id = flask.request.json.get("tenant", None) - end = flask.request.json.get("end", None) - - if not end: - end = datetime.utcnow() - else: - try: - end = datetime.strptime(end, iso_date) - except ValueError: - try: - end = datetime.strptime(end, iso_time) - except ValueError: - return 400, { - "errors": ["'end' date given needs to be in format: " + - "y-m-d, or y-m-dTH:M:S"]} - - return generate_sales_order(True, tenant_id, end) - - -@app.route("sales_historic", methods=["POST"]) -@require_admin -@json_must() -@returns_json -def run_sales_historic_generation(): - """Returns the sales order that intersects with the given target date. - -target: a given target date""" - tenant_id = flask.request.json.get("tenant", None) - target = flask.request.json.get("date", None) - - if target is not None: - try: - target = datetime.strptime(target, iso_date) - except ValueError: - return 400, {"errors": ["date given needs to be in format: " + - "y-m-d"]} - else: - return 400, {"missing parameter": {"date": "target date in format: " + - "y-m-d"}} - - return regenerate_sales_order(tenant_id, target) - - -@app.route("sales_range", methods=["POST"]) -@require_admin -@json_must() -@returns_json -def run_sales_historic_range_generation(): - """Returns the sales orders that intersect with the given date range. 
- -start: a given start for the range. - -end: a given end for the range, defaults to now.""" - tenant_id = flask.request.json.get("tenant", None) - start = flask.request.json.get("start", None) - end = flask.request.json.get("end", None) - - try: - if start is not None: - start = datetime.strptime(start, iso_date) - else: - return 400, {"missing parameter": {"start": "start date" + - " in format: y-m-d"}} - if end is not None: - end = datetime.strptime(end, iso_date) - else: - end = datetime.utcnow() - except ValueError: - return 400, {"errors": ["dates given need to be in format: " + - "y-m-d"]} - - return regenerate_sales_order_range(tenant_id, start, end) + return tenant_dict if __name__ == '__main__': diff --git a/distil/config.py b/distil/config.py index 58f6561..f38ff59 100644 --- a/distil/config.py +++ b/distil/config.py @@ -15,6 +15,7 @@ # This is simply a namespace for global config storage main = None rates_config = None +memcache = None auth = None collection = None transformers = None @@ -25,6 +26,14 @@ def setup_config(conf): main = conf['main'] global rates_config rates_config = conf['rates_config'] + + # special case to avoid issues with older configs + try: + global memcache + memcache = conf['memcache'] + except KeyError: + memcache = {'enabled': False} + global auth auth = conf['auth'] global collection diff --git a/distil/constants.py b/distil/constants.py index 8514084..b374b38 100644 --- a/distil/constants.py +++ b/distil/constants.py @@ -32,7 +32,7 @@ iso_time = "%Y-%m-%dT%H:%M:%S" iso_date = "%Y-%m-%d" dawn_of_time = datetime(2014, 4, 1) -# VM states: +# VM states (SOON TO BE REMOVED): states = {'active': 1, 'building': 2, 'paused': 3, diff --git a/distil/database.py b/distil/database.py index 90040fa..67e38ba 100644 --- a/distil/database.py +++ b/distil/database.py @@ -111,11 +111,11 @@ class Database(object): return query - def get_resource_metadata(self, resource_id): - """Gets the metadata for a resource and loads it into a dict.""" - info = self.session.query(Resource.info).\ - filter(Resource.id == resource_id) - return json.loads(info[0].info) + def get_resources(self, resource_id_list): + """Gets resource metadata in bulk.""" + query = self.session.query(Resource.id, Resource.info).\ + filter(Resource.id.in_(resource_id_list)) + return {row.id: json.loads(row.info) for row in query} def get_sales_orders(self, tenant_id, start, end): """Returns a query with all sales orders diff --git a/distil/helpers.py b/distil/helpers.py index 6aa6430..00bd13a 100644 --- a/distil/helpers.py +++ b/distil/helpers.py @@ -12,8 +12,8 @@ # License for the specific language governing permissions and limitations # under the License. 
-from novaclient.v1_1 import client -from cinderclient.v1 import client as cinderclient +from novaclient import client as novaclient +from cinderclient.v2 import client as cinderclient from decimal import Decimal import config import math @@ -30,12 +30,15 @@ def reset_cache(): def flavor_name(f_id): """Grabs the correct flavor name from Nova given the correct ID.""" + _client_class = novaclient.get_client_class(2) + if f_id not in cache['flavors']: - nova = client.Client( + nova = _client_class( config.auth['username'], config.auth['password'], config.auth['default_tenant'], config.auth['end_point'], + region_name=config.main['region'], insecure=config.auth['insecure']) cache['flavors'][f_id] = nova.flavors.get(f_id).name @@ -49,6 +52,7 @@ def volume_type(volume_type): config.auth['password'], config.auth['default_tenant'], config.auth['end_point'], + region=config.main['region'], insecure=config.auth['insecure']) for vtype in cinder.volume_types.list(): diff --git a/distil/tests/__init__.py b/distil/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/distil/tests/unit/__init__.py b/distil/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/data/ceilometer_json.py b/distil/tests/unit/data/ceilometer_json.py similarity index 100% rename from tests/data/ceilometer_json.py rename to distil/tests/unit/data/ceilometer_json.py diff --git a/tests/data/map_fixture_0.json b/distil/tests/unit/data/map_fixture_0.json similarity index 100% rename from tests/data/map_fixture_0.json rename to distil/tests/unit/data/map_fixture_0.json diff --git a/tests/data/map_fixture_1.json b/distil/tests/unit/data/map_fixture_1.json similarity index 100% rename from tests/data/map_fixture_1.json rename to distil/tests/unit/data/map_fixture_1.json diff --git a/tests/data/map_fixture_10.json b/distil/tests/unit/data/map_fixture_10.json similarity index 100% rename from tests/data/map_fixture_10.json rename to distil/tests/unit/data/map_fixture_10.json diff --git a/tests/data/map_fixture_100.json b/distil/tests/unit/data/map_fixture_100.json similarity index 100% rename from tests/data/map_fixture_100.json rename to distil/tests/unit/data/map_fixture_100.json diff --git a/tests/data/map_fixture_101.json b/distil/tests/unit/data/map_fixture_101.json similarity index 100% rename from tests/data/map_fixture_101.json rename to distil/tests/unit/data/map_fixture_101.json diff --git a/tests/data/map_fixture_102.json b/distil/tests/unit/data/map_fixture_102.json similarity index 100% rename from tests/data/map_fixture_102.json rename to distil/tests/unit/data/map_fixture_102.json diff --git a/tests/data/map_fixture_103.json b/distil/tests/unit/data/map_fixture_103.json similarity index 100% rename from tests/data/map_fixture_103.json rename to distil/tests/unit/data/map_fixture_103.json diff --git a/tests/data/map_fixture_104.json b/distil/tests/unit/data/map_fixture_104.json similarity index 100% rename from tests/data/map_fixture_104.json rename to distil/tests/unit/data/map_fixture_104.json diff --git a/tests/data/map_fixture_105.json b/distil/tests/unit/data/map_fixture_105.json similarity index 100% rename from tests/data/map_fixture_105.json rename to distil/tests/unit/data/map_fixture_105.json diff --git a/tests/data/map_fixture_106.json b/distil/tests/unit/data/map_fixture_106.json similarity index 100% rename from tests/data/map_fixture_106.json rename to distil/tests/unit/data/map_fixture_106.json diff --git a/tests/data/map_fixture_107.json 
b/distil/tests/unit/data/map_fixture_107.json similarity index 100% rename from tests/data/map_fixture_107.json rename to distil/tests/unit/data/map_fixture_107.json diff --git a/tests/data/map_fixture_11.json b/distil/tests/unit/data/map_fixture_11.json similarity index 100% rename from tests/data/map_fixture_11.json rename to distil/tests/unit/data/map_fixture_11.json diff --git a/tests/data/map_fixture_12.json b/distil/tests/unit/data/map_fixture_12.json similarity index 100% rename from tests/data/map_fixture_12.json rename to distil/tests/unit/data/map_fixture_12.json diff --git a/tests/data/map_fixture_13.json b/distil/tests/unit/data/map_fixture_13.json similarity index 100% rename from tests/data/map_fixture_13.json rename to distil/tests/unit/data/map_fixture_13.json diff --git a/tests/data/map_fixture_14.json b/distil/tests/unit/data/map_fixture_14.json similarity index 100% rename from tests/data/map_fixture_14.json rename to distil/tests/unit/data/map_fixture_14.json diff --git a/tests/data/map_fixture_15.json b/distil/tests/unit/data/map_fixture_15.json similarity index 100% rename from tests/data/map_fixture_15.json rename to distil/tests/unit/data/map_fixture_15.json diff --git a/tests/data/map_fixture_16.json b/distil/tests/unit/data/map_fixture_16.json similarity index 100% rename from tests/data/map_fixture_16.json rename to distil/tests/unit/data/map_fixture_16.json diff --git a/tests/data/map_fixture_17.json b/distil/tests/unit/data/map_fixture_17.json similarity index 100% rename from tests/data/map_fixture_17.json rename to distil/tests/unit/data/map_fixture_17.json diff --git a/tests/data/map_fixture_18.json b/distil/tests/unit/data/map_fixture_18.json similarity index 100% rename from tests/data/map_fixture_18.json rename to distil/tests/unit/data/map_fixture_18.json diff --git a/tests/data/map_fixture_19.json b/distil/tests/unit/data/map_fixture_19.json similarity index 100% rename from tests/data/map_fixture_19.json rename to distil/tests/unit/data/map_fixture_19.json diff --git a/tests/data/map_fixture_2.json b/distil/tests/unit/data/map_fixture_2.json similarity index 100% rename from tests/data/map_fixture_2.json rename to distil/tests/unit/data/map_fixture_2.json diff --git a/tests/data/map_fixture_20.json b/distil/tests/unit/data/map_fixture_20.json similarity index 100% rename from tests/data/map_fixture_20.json rename to distil/tests/unit/data/map_fixture_20.json diff --git a/tests/data/map_fixture_21.json b/distil/tests/unit/data/map_fixture_21.json similarity index 100% rename from tests/data/map_fixture_21.json rename to distil/tests/unit/data/map_fixture_21.json diff --git a/tests/data/map_fixture_22.json b/distil/tests/unit/data/map_fixture_22.json similarity index 100% rename from tests/data/map_fixture_22.json rename to distil/tests/unit/data/map_fixture_22.json diff --git a/tests/data/map_fixture_23.json b/distil/tests/unit/data/map_fixture_23.json similarity index 100% rename from tests/data/map_fixture_23.json rename to distil/tests/unit/data/map_fixture_23.json diff --git a/tests/data/map_fixture_24.json b/distil/tests/unit/data/map_fixture_24.json similarity index 100% rename from tests/data/map_fixture_24.json rename to distil/tests/unit/data/map_fixture_24.json diff --git a/tests/data/map_fixture_25.json b/distil/tests/unit/data/map_fixture_25.json similarity index 100% rename from tests/data/map_fixture_25.json rename to distil/tests/unit/data/map_fixture_25.json diff --git a/tests/data/map_fixture_26.json 
b/distil/tests/unit/data/map_fixture_26.json similarity index 100% rename from tests/data/map_fixture_26.json rename to distil/tests/unit/data/map_fixture_26.json diff --git a/tests/data/map_fixture_27.json b/distil/tests/unit/data/map_fixture_27.json similarity index 100% rename from tests/data/map_fixture_27.json rename to distil/tests/unit/data/map_fixture_27.json diff --git a/tests/data/map_fixture_28.json b/distil/tests/unit/data/map_fixture_28.json similarity index 100% rename from tests/data/map_fixture_28.json rename to distil/tests/unit/data/map_fixture_28.json diff --git a/tests/data/map_fixture_29.json b/distil/tests/unit/data/map_fixture_29.json similarity index 100% rename from tests/data/map_fixture_29.json rename to distil/tests/unit/data/map_fixture_29.json diff --git a/tests/data/map_fixture_3.json b/distil/tests/unit/data/map_fixture_3.json similarity index 100% rename from tests/data/map_fixture_3.json rename to distil/tests/unit/data/map_fixture_3.json diff --git a/tests/data/map_fixture_30.json b/distil/tests/unit/data/map_fixture_30.json similarity index 100% rename from tests/data/map_fixture_30.json rename to distil/tests/unit/data/map_fixture_30.json diff --git a/tests/data/map_fixture_31.json b/distil/tests/unit/data/map_fixture_31.json similarity index 100% rename from tests/data/map_fixture_31.json rename to distil/tests/unit/data/map_fixture_31.json diff --git a/tests/data/map_fixture_32.json b/distil/tests/unit/data/map_fixture_32.json similarity index 100% rename from tests/data/map_fixture_32.json rename to distil/tests/unit/data/map_fixture_32.json diff --git a/tests/data/map_fixture_33.json b/distil/tests/unit/data/map_fixture_33.json similarity index 100% rename from tests/data/map_fixture_33.json rename to distil/tests/unit/data/map_fixture_33.json diff --git a/tests/data/map_fixture_34.json b/distil/tests/unit/data/map_fixture_34.json similarity index 100% rename from tests/data/map_fixture_34.json rename to distil/tests/unit/data/map_fixture_34.json diff --git a/tests/data/map_fixture_35.json b/distil/tests/unit/data/map_fixture_35.json similarity index 100% rename from tests/data/map_fixture_35.json rename to distil/tests/unit/data/map_fixture_35.json diff --git a/tests/data/map_fixture_36.json b/distil/tests/unit/data/map_fixture_36.json similarity index 100% rename from tests/data/map_fixture_36.json rename to distil/tests/unit/data/map_fixture_36.json diff --git a/tests/data/map_fixture_37.json b/distil/tests/unit/data/map_fixture_37.json similarity index 100% rename from tests/data/map_fixture_37.json rename to distil/tests/unit/data/map_fixture_37.json diff --git a/tests/data/map_fixture_38.json b/distil/tests/unit/data/map_fixture_38.json similarity index 100% rename from tests/data/map_fixture_38.json rename to distil/tests/unit/data/map_fixture_38.json diff --git a/tests/data/map_fixture_39.json b/distil/tests/unit/data/map_fixture_39.json similarity index 100% rename from tests/data/map_fixture_39.json rename to distil/tests/unit/data/map_fixture_39.json diff --git a/tests/data/map_fixture_4.json b/distil/tests/unit/data/map_fixture_4.json similarity index 100% rename from tests/data/map_fixture_4.json rename to distil/tests/unit/data/map_fixture_4.json diff --git a/tests/data/map_fixture_40.json b/distil/tests/unit/data/map_fixture_40.json similarity index 100% rename from tests/data/map_fixture_40.json rename to distil/tests/unit/data/map_fixture_40.json diff --git a/tests/data/map_fixture_41.json 
b/distil/tests/unit/data/map_fixture_41.json similarity index 100% rename from tests/data/map_fixture_41.json rename to distil/tests/unit/data/map_fixture_41.json diff --git a/tests/data/map_fixture_42.json b/distil/tests/unit/data/map_fixture_42.json similarity index 100% rename from tests/data/map_fixture_42.json rename to distil/tests/unit/data/map_fixture_42.json diff --git a/tests/data/map_fixture_43.json b/distil/tests/unit/data/map_fixture_43.json similarity index 100% rename from tests/data/map_fixture_43.json rename to distil/tests/unit/data/map_fixture_43.json diff --git a/tests/data/map_fixture_44.json b/distil/tests/unit/data/map_fixture_44.json similarity index 100% rename from tests/data/map_fixture_44.json rename to distil/tests/unit/data/map_fixture_44.json diff --git a/tests/data/map_fixture_45.json b/distil/tests/unit/data/map_fixture_45.json similarity index 100% rename from tests/data/map_fixture_45.json rename to distil/tests/unit/data/map_fixture_45.json diff --git a/tests/data/map_fixture_46.json b/distil/tests/unit/data/map_fixture_46.json similarity index 100% rename from tests/data/map_fixture_46.json rename to distil/tests/unit/data/map_fixture_46.json diff --git a/tests/data/map_fixture_47.json b/distil/tests/unit/data/map_fixture_47.json similarity index 100% rename from tests/data/map_fixture_47.json rename to distil/tests/unit/data/map_fixture_47.json diff --git a/tests/data/map_fixture_48.json b/distil/tests/unit/data/map_fixture_48.json similarity index 100% rename from tests/data/map_fixture_48.json rename to distil/tests/unit/data/map_fixture_48.json diff --git a/tests/data/map_fixture_49.json b/distil/tests/unit/data/map_fixture_49.json similarity index 100% rename from tests/data/map_fixture_49.json rename to distil/tests/unit/data/map_fixture_49.json diff --git a/tests/data/map_fixture_5.json b/distil/tests/unit/data/map_fixture_5.json similarity index 100% rename from tests/data/map_fixture_5.json rename to distil/tests/unit/data/map_fixture_5.json diff --git a/tests/data/map_fixture_50.json b/distil/tests/unit/data/map_fixture_50.json similarity index 100% rename from tests/data/map_fixture_50.json rename to distil/tests/unit/data/map_fixture_50.json diff --git a/tests/data/map_fixture_51.json b/distil/tests/unit/data/map_fixture_51.json similarity index 100% rename from tests/data/map_fixture_51.json rename to distil/tests/unit/data/map_fixture_51.json diff --git a/tests/data/map_fixture_52.json b/distil/tests/unit/data/map_fixture_52.json similarity index 100% rename from tests/data/map_fixture_52.json rename to distil/tests/unit/data/map_fixture_52.json diff --git a/tests/data/map_fixture_53.json b/distil/tests/unit/data/map_fixture_53.json similarity index 100% rename from tests/data/map_fixture_53.json rename to distil/tests/unit/data/map_fixture_53.json diff --git a/tests/data/map_fixture_54.json b/distil/tests/unit/data/map_fixture_54.json similarity index 100% rename from tests/data/map_fixture_54.json rename to distil/tests/unit/data/map_fixture_54.json diff --git a/tests/data/map_fixture_55.json b/distil/tests/unit/data/map_fixture_55.json similarity index 100% rename from tests/data/map_fixture_55.json rename to distil/tests/unit/data/map_fixture_55.json diff --git a/tests/data/map_fixture_56.json b/distil/tests/unit/data/map_fixture_56.json similarity index 100% rename from tests/data/map_fixture_56.json rename to distil/tests/unit/data/map_fixture_56.json diff --git a/tests/data/map_fixture_57.json 
b/distil/tests/unit/data/map_fixture_57.json similarity index 100% rename from tests/data/map_fixture_57.json rename to distil/tests/unit/data/map_fixture_57.json diff --git a/tests/data/map_fixture_58.json b/distil/tests/unit/data/map_fixture_58.json similarity index 100% rename from tests/data/map_fixture_58.json rename to distil/tests/unit/data/map_fixture_58.json diff --git a/tests/data/map_fixture_59.json b/distil/tests/unit/data/map_fixture_59.json similarity index 100% rename from tests/data/map_fixture_59.json rename to distil/tests/unit/data/map_fixture_59.json diff --git a/tests/data/map_fixture_6.json b/distil/tests/unit/data/map_fixture_6.json similarity index 100% rename from tests/data/map_fixture_6.json rename to distil/tests/unit/data/map_fixture_6.json diff --git a/tests/data/map_fixture_60.json b/distil/tests/unit/data/map_fixture_60.json similarity index 100% rename from tests/data/map_fixture_60.json rename to distil/tests/unit/data/map_fixture_60.json diff --git a/tests/data/map_fixture_61.json b/distil/tests/unit/data/map_fixture_61.json similarity index 100% rename from tests/data/map_fixture_61.json rename to distil/tests/unit/data/map_fixture_61.json diff --git a/tests/data/map_fixture_62.json b/distil/tests/unit/data/map_fixture_62.json similarity index 100% rename from tests/data/map_fixture_62.json rename to distil/tests/unit/data/map_fixture_62.json diff --git a/tests/data/map_fixture_63.json b/distil/tests/unit/data/map_fixture_63.json similarity index 100% rename from tests/data/map_fixture_63.json rename to distil/tests/unit/data/map_fixture_63.json diff --git a/tests/data/map_fixture_64.json b/distil/tests/unit/data/map_fixture_64.json similarity index 100% rename from tests/data/map_fixture_64.json rename to distil/tests/unit/data/map_fixture_64.json diff --git a/tests/data/map_fixture_65.json b/distil/tests/unit/data/map_fixture_65.json similarity index 100% rename from tests/data/map_fixture_65.json rename to distil/tests/unit/data/map_fixture_65.json diff --git a/tests/data/map_fixture_66.json b/distil/tests/unit/data/map_fixture_66.json similarity index 100% rename from tests/data/map_fixture_66.json rename to distil/tests/unit/data/map_fixture_66.json diff --git a/tests/data/map_fixture_67.json b/distil/tests/unit/data/map_fixture_67.json similarity index 100% rename from tests/data/map_fixture_67.json rename to distil/tests/unit/data/map_fixture_67.json diff --git a/tests/data/map_fixture_68.json b/distil/tests/unit/data/map_fixture_68.json similarity index 100% rename from tests/data/map_fixture_68.json rename to distil/tests/unit/data/map_fixture_68.json diff --git a/tests/data/map_fixture_69.json b/distil/tests/unit/data/map_fixture_69.json similarity index 100% rename from tests/data/map_fixture_69.json rename to distil/tests/unit/data/map_fixture_69.json diff --git a/tests/data/map_fixture_7.json b/distil/tests/unit/data/map_fixture_7.json similarity index 100% rename from tests/data/map_fixture_7.json rename to distil/tests/unit/data/map_fixture_7.json diff --git a/tests/data/map_fixture_70.json b/distil/tests/unit/data/map_fixture_70.json similarity index 100% rename from tests/data/map_fixture_70.json rename to distil/tests/unit/data/map_fixture_70.json diff --git a/tests/data/map_fixture_71.json b/distil/tests/unit/data/map_fixture_71.json similarity index 100% rename from tests/data/map_fixture_71.json rename to distil/tests/unit/data/map_fixture_71.json diff --git a/tests/data/map_fixture_72.json 
b/distil/tests/unit/data/map_fixture_72.json similarity index 100% rename from tests/data/map_fixture_72.json rename to distil/tests/unit/data/map_fixture_72.json diff --git a/tests/data/map_fixture_73.json b/distil/tests/unit/data/map_fixture_73.json similarity index 100% rename from tests/data/map_fixture_73.json rename to distil/tests/unit/data/map_fixture_73.json diff --git a/tests/data/map_fixture_74.json b/distil/tests/unit/data/map_fixture_74.json similarity index 100% rename from tests/data/map_fixture_74.json rename to distil/tests/unit/data/map_fixture_74.json diff --git a/tests/data/map_fixture_75.json b/distil/tests/unit/data/map_fixture_75.json similarity index 100% rename from tests/data/map_fixture_75.json rename to distil/tests/unit/data/map_fixture_75.json diff --git a/tests/data/map_fixture_76.json b/distil/tests/unit/data/map_fixture_76.json similarity index 100% rename from tests/data/map_fixture_76.json rename to distil/tests/unit/data/map_fixture_76.json diff --git a/tests/data/map_fixture_77.json b/distil/tests/unit/data/map_fixture_77.json similarity index 100% rename from tests/data/map_fixture_77.json rename to distil/tests/unit/data/map_fixture_77.json diff --git a/tests/data/map_fixture_78.json b/distil/tests/unit/data/map_fixture_78.json similarity index 100% rename from tests/data/map_fixture_78.json rename to distil/tests/unit/data/map_fixture_78.json diff --git a/tests/data/map_fixture_79.json b/distil/tests/unit/data/map_fixture_79.json similarity index 100% rename from tests/data/map_fixture_79.json rename to distil/tests/unit/data/map_fixture_79.json diff --git a/tests/data/map_fixture_8.json b/distil/tests/unit/data/map_fixture_8.json similarity index 100% rename from tests/data/map_fixture_8.json rename to distil/tests/unit/data/map_fixture_8.json diff --git a/tests/data/map_fixture_80.json b/distil/tests/unit/data/map_fixture_80.json similarity index 100% rename from tests/data/map_fixture_80.json rename to distil/tests/unit/data/map_fixture_80.json diff --git a/tests/data/map_fixture_81.json b/distil/tests/unit/data/map_fixture_81.json similarity index 100% rename from tests/data/map_fixture_81.json rename to distil/tests/unit/data/map_fixture_81.json diff --git a/tests/data/map_fixture_82.json b/distil/tests/unit/data/map_fixture_82.json similarity index 100% rename from tests/data/map_fixture_82.json rename to distil/tests/unit/data/map_fixture_82.json diff --git a/tests/data/map_fixture_83.json b/distil/tests/unit/data/map_fixture_83.json similarity index 100% rename from tests/data/map_fixture_83.json rename to distil/tests/unit/data/map_fixture_83.json diff --git a/tests/data/map_fixture_84.json b/distil/tests/unit/data/map_fixture_84.json similarity index 100% rename from tests/data/map_fixture_84.json rename to distil/tests/unit/data/map_fixture_84.json diff --git a/tests/data/map_fixture_85.json b/distil/tests/unit/data/map_fixture_85.json similarity index 100% rename from tests/data/map_fixture_85.json rename to distil/tests/unit/data/map_fixture_85.json diff --git a/tests/data/map_fixture_86.json b/distil/tests/unit/data/map_fixture_86.json similarity index 100% rename from tests/data/map_fixture_86.json rename to distil/tests/unit/data/map_fixture_86.json diff --git a/tests/data/map_fixture_87.json b/distil/tests/unit/data/map_fixture_87.json similarity index 100% rename from tests/data/map_fixture_87.json rename to distil/tests/unit/data/map_fixture_87.json diff --git a/tests/data/map_fixture_88.json 
b/distil/tests/unit/data/map_fixture_88.json similarity index 100% rename from tests/data/map_fixture_88.json rename to distil/tests/unit/data/map_fixture_88.json diff --git a/tests/data/map_fixture_89.json b/distil/tests/unit/data/map_fixture_89.json similarity index 100% rename from tests/data/map_fixture_89.json rename to distil/tests/unit/data/map_fixture_89.json diff --git a/tests/data/map_fixture_9.json b/distil/tests/unit/data/map_fixture_9.json similarity index 100% rename from tests/data/map_fixture_9.json rename to distil/tests/unit/data/map_fixture_9.json diff --git a/tests/data/map_fixture_90.json b/distil/tests/unit/data/map_fixture_90.json similarity index 100% rename from tests/data/map_fixture_90.json rename to distil/tests/unit/data/map_fixture_90.json diff --git a/tests/data/map_fixture_91.json b/distil/tests/unit/data/map_fixture_91.json similarity index 100% rename from tests/data/map_fixture_91.json rename to distil/tests/unit/data/map_fixture_91.json diff --git a/tests/data/map_fixture_92.json b/distil/tests/unit/data/map_fixture_92.json similarity index 100% rename from tests/data/map_fixture_92.json rename to distil/tests/unit/data/map_fixture_92.json diff --git a/tests/data/map_fixture_93.json b/distil/tests/unit/data/map_fixture_93.json similarity index 100% rename from tests/data/map_fixture_93.json rename to distil/tests/unit/data/map_fixture_93.json diff --git a/tests/data/map_fixture_94.json b/distil/tests/unit/data/map_fixture_94.json similarity index 100% rename from tests/data/map_fixture_94.json rename to distil/tests/unit/data/map_fixture_94.json diff --git a/tests/data/map_fixture_95.json b/distil/tests/unit/data/map_fixture_95.json similarity index 100% rename from tests/data/map_fixture_95.json rename to distil/tests/unit/data/map_fixture_95.json diff --git a/tests/data/map_fixture_96.json b/distil/tests/unit/data/map_fixture_96.json similarity index 100% rename from tests/data/map_fixture_96.json rename to distil/tests/unit/data/map_fixture_96.json diff --git a/tests/data/map_fixture_97.json b/distil/tests/unit/data/map_fixture_97.json similarity index 100% rename from tests/data/map_fixture_97.json rename to distil/tests/unit/data/map_fixture_97.json diff --git a/tests/data/map_fixture_98.json b/distil/tests/unit/data/map_fixture_98.json similarity index 100% rename from tests/data/map_fixture_98.json rename to distil/tests/unit/data/map_fixture_98.json diff --git a/tests/data/map_fixture_99.json b/distil/tests/unit/data/map_fixture_99.json similarity index 100% rename from tests/data/map_fixture_99.json rename to distil/tests/unit/data/map_fixture_99.json diff --git a/tests/data/resources.json b/distil/tests/unit/data/resources.json similarity index 100% rename from tests/data/resources.json rename to distil/tests/unit/data/resources.json diff --git a/tests/data_samples.py b/distil/tests/unit/data_samples.py similarity index 100% rename from tests/data_samples.py rename to distil/tests/unit/data_samples.py diff --git a/tests/test_api.py b/distil/tests/unit/test_api.py similarity index 59% rename from tests/test_api.py rename to distil/tests/unit/test_api.py index f7369e2..1050abe 100644 --- a/tests/test_api.py +++ b/distil/tests/unit/test_api.py @@ -13,11 +13,13 @@ # under the License. from webtest import TestApp -from . 
import test_interface, helpers, constants +from distil.tests.unit import test_interface +from distil.tests.unit import utils from distil.api import web from distil.api.web import get_app from distil import models from distil import interface +from distil import config from distil.helpers import convert_to from distil.constants import dawn_of_time from datetime import datetime @@ -27,14 +29,17 @@ import json import mock -class TestApi(test_interface.TestInterface): +class TestAPI(test_interface.TestInterface): + __name__ = 'TestAPI' def setUp(self): - super(TestApi, self).setUp() - self.app = TestApp(get_app(constants.config)) + self.db_uri = 'sqlite:////tmp/distl.db' + super(TestAPI, self).setUp() + with mock.patch("distil.api.web.setup_memcache") as setup_memcache: + self.app = TestApp(get_app(utils.FAKE_CONFIG)) def tearDown(self): - super(TestApi, self).tearDown() + super(TestAPI, self).tearDown() self.app = None @unittest.skip @@ -79,60 +84,85 @@ class TestApi(test_interface.TestInterface): self.assertEquals(resources.count(), len(usage.values())) - def test_sales_run_for_all(self): - """Assertion that a sales run generates all tenant orders""" - numTenants = 7 + @unittest.skip + def test_memcache_raw_usage(self): + """Tests that raw usage queries are cached, and returned.""" + numTenants = 1 numResources = 5 - now = datetime.utcnow().\ - replace(hour=0, minute=0, second=0, microsecond=0) + end = datetime.strptime("2014-08-01", "%Y-%m-%d") - helpers.fill_db(self.session, numTenants, numResources, now) + fake_memcache = {} + keys = [] + values = [] - for i in range(numTenants): - resp = self.app.post("/sales_order", - params=json.dumps({"tenant": "tenant_id_" + - str(i)}), - content_type='application/json') - resp_json = json.loads(resp.body) - print resp_json + def set_mem(key, value): + keys.append(key) + values.append(value) + fake_memcache[key] = value - query = self.session.query(models.SalesOrder) - self.assertEquals(query.count(), i + 1) + def get_mem(key): + return fake_memcache.get(key, None) - self.assertEquals(len(resp_json['resources']), numResources) + utils.init_db(self.session, numTenants, numResources, end) - def test_sales_run_single(self): - """Assertion that a sales run generates one tenant only""" - numTenants = 5 + with mock.patch("distil.api.web.memcache") as memcache: + memcache.get.side_effect = get_mem + memcache.set.side_effect = set_mem + resp = self.app.get("/get_usage", + params={"tenant": "tenant_id_0", + "start": "2014-07-01T00:00:00", + "end": "2014-08-01T00:00:00"}) + self.assertEquals(resp.body, values[0]) + + test_string = "this is not a valid computation" + fake_memcache[keys[0]] = test_string + resp2 = self.app.get("/get_usage", + params={"tenant": "tenant_id_0", + "start": "2014-07-01T00:00:00", + "end": "2014-08-01T00:00:00"}) + self.assertEquals(1, len(values)) + self.assertEquals(resp2.body, test_string) + + @unittest.skip + def test_memcache_rated_usage(self): + """Tests that rated usage queries are cached, and returned.""" + numTenants = 1 numResources = 5 - now = datetime.utcnow().\ - replace(hour=0, minute=0, second=0, microsecond=0) - helpers.fill_db(self.session, numTenants, numResources, now) - resp = self.app.post("/sales_order", - params=json.dumps({"tenant": "tenant_id_0"}), - content_type="application/json") - resp_json = json.loads(resp.body) + end = datetime.strptime("2014-08-01", "%Y-%m-%d") - query = self.session.query(models.SalesOrder) - self.assertEquals(query.count(), 1) - # todo: assert things in the response - 
self.assertEquals(len(resp_json['resources']), numResources) + fake_memcache = {} + keys = [] + values = [] - def test_sales_raises_400(self): - """Assertion that 400 is being thrown if content is not json.""" - resp = self.app.post("/sales_order", expect_errors=True) - self.assertEquals(resp.status_int, 400) + def set_mem(key, value): + keys.append(key) + values.append(value) + fake_memcache[key] = value - def test_sales_order_no_tenant_found(self): - """Test that if a tenant is provided and not found, - then we throw an error.""" - resp = self.app.post('/sales_order', - expect_errors=True, - params=json.dumps({'tenant': 'bogus tenant'}), - content_type='application/json') - self.assertEquals(resp.status_int, 400) + def get_mem(key): + return fake_memcache.get(key, None) + + utils.init_db(self.session, numTenants, numResources, end) + + with mock.patch("distil.api.web.memcache") as memcache: + memcache.get.side_effect = get_mem + memcache.set.side_effect = set_mem + resp = self.app.get("/get_rated", + params={"tenant": "tenant_id_0", + "start": "2014-07-01T00:00:00", + "end": "2014-08-01T00:00:00"}) + self.assertEquals(resp.body, values[0]) + + test_string = "this is not a valid computation" + fake_memcache[keys[0]] = test_string + resp2 = self.app.get("/get_rated", + params={"tenant": "tenant_id_0", + "start": "2014-07-01T00:00:00", + "end": "2014-08-01T00:00:00"}) + self.assertEquals(1, len(values)) + self.assertEquals(resp2.body, test_string) def test_tenant_dict(self): """Checking that the tenant dictionary is built correctly @@ -141,15 +171,19 @@ class TestApi(test_interface.TestInterface): num_services = 2 volume = 5 - entries = helpers.create_usage_entries(num_resources, - num_services, volume) + entries = utils.create_usage_entries(num_resources, + num_services, volume) tenant = mock.MagicMock() tenant.name = "tenant_1" tenant.id = "tenant_id_1" db = mock.MagicMock() - db.get_resource_metadata.return_value = {} + db.get_resources.return_value = { + 'resource_id_0': {}, + 'resource_id_1': {}, + 'resource_id_2': {}, + } tenant_dict = web.build_tenant_dict(tenant, entries, db) @@ -171,7 +205,6 @@ class TestApi(test_interface.TestInterface): tenant.id = "tenant_id_1" db = mock.MagicMock() - db.get_resource_metadata.return_value = {} tenant_dict = web.build_tenant_dict(tenant, entries, db) @@ -187,7 +220,7 @@ class TestApi(test_interface.TestInterface): test_tenant = { 'resources': { - 'resouce_ID_1': { + 'resource_1': { 'services': [{'name': 'service_1', 'volume': Decimal(volume), 'unit': 'second'}, @@ -195,7 +228,7 @@ class TestApi(test_interface.TestInterface): 'volume': Decimal(volume), 'unit': 'second'}] }, - 'resouce_ID_2': { + 'resource_2': { 'services': [{'name': 'service_1', 'volume': Decimal(volume), 'unit': 'second'}, @@ -239,7 +272,7 @@ class TestApi(test_interface.TestInterface): def test_get_last_collected(self): """test to ensure last collected api call returns correctly""" - now = datetime.utcnow() + now = datetime.now() self.session.add(models._Last_Run(last_run=now)) self.session.commit() resp = self.app.get("/last_collected") @@ -251,3 +284,18 @@ class TestApi(test_interface.TestInterface): resp = self.app.get("/last_collected") resp_json = json.loads(resp.body) self.assertEquals(resp_json['last_collected'], str(dawn_of_time)) + + def test_filter_and_group(self): + usage = [{'source': 'openstack', 'resource_id': 1}, + {'source': '22c4f150358e4ed287fa51e050d7f024:TrafficAccounting', 'resource_id': 2}, + {'source': 'fake', 'resource_id': 3},] + usage_by_resource = {} + 
config.main = {'trust_sources': + ['openstack', '.{32}:TrafficAccounting']} + web.filter_and_group(usage, usage_by_resource) + + expected = {1: [{'source': 'openstack', 'resource_id': 1}], + 2: [{'source': + '22c4f150358e4ed287fa51e050d7f024:TrafficAccounting', + 'resource_id': 2}]} + self.assertEquals(usage_by_resource, expected) diff --git a/tests/test_database_module.py b/distil/tests/unit/test_database.py similarity index 86% rename from tests/test_database_module.py rename to distil/tests/unit/test_database.py index f11fe53..396bfbe 100644 --- a/tests/test_database_module.py +++ b/distil/tests/unit/test_database.py @@ -12,19 +12,19 @@ # License for the specific language governing permissions and limitations # under the License. -from . import test_interface, helpers +from distil.tests.unit import test_interface, utils from distil import database from datetime import timedelta -class TestDatabaseModule(test_interface.TestInterface): +class TestDatabase(test_interface.TestInterface): def test_get_from_db(self): """Test to ensure the data in the database matches the data entered.""" num_resources = 32 num_tenants = 5 - helpers.fill_db(self.session, num_tenants, num_resources, self.end) + utils.init_db(self.session, num_tenants, num_resources, self.end) db = database.Database(self.session) diff --git a/tests/test_interface.py b/distil/tests/unit/test_interface.py similarity index 70% rename from tests/test_interface.py rename to distil/tests/unit/test_interface.py index 1852ef7..3ccacee 100644 --- a/tests/test_interface.py +++ b/distil/tests/unit/test_interface.py @@ -15,42 +15,42 @@ import unittest from distil.models import Tenant as tenant_model from distil.models import UsageEntry, Resource, SalesOrder, _Last_Run -from sqlalchemy.pool import NullPool - -from sqlalchemy import create_engine +from distil import models from sqlalchemy.orm import sessionmaker - from datetime import datetime, timedelta from sqlalchemy.ext.declarative import declarative_base Base = declarative_base() -from . import PG_DATABASE_URI -from .data_samples import RESOURCES +import sqlalchemy as sa +from distil.tests.unit import data_samples +from distil.tests.unit import utils class TestInterface(unittest.TestCase): def setUp(self): - - engine = create_engine(PG_DATABASE_URI, poolclass=NullPool) + super(TestInterface, self).setUp() + engine = sa.create_engine(getattr(self, 'db_uri', utils.DATABASE_URI)) + models.Base.metadata.create_all(bind=engine, checkfirst=True) Session = sessionmaker(bind=engine) self.session = Session() self.objects = [] self.session.rollback() self.called_replacement_resources = False - self.resources = (RESOURCES["networks"] + RESOURCES["vms"] + - RESOURCES["objects"] + RESOURCES["volumes"] + - RESOURCES["ips"]) + self.resources = (data_samples.RESOURCES["networks"] + + data_samples.RESOURCES["vms"] + + data_samples.RESOURCES["objects"] + + data_samples.RESOURCES["volumes"] + + data_samples.RESOURCES["ips"]) # TODO: make these constants. 
self.end = datetime.utcnow() self.start = self.end - timedelta(days=30) def tearDown(self): - self.session.query(UsageEntry).delete() self.session.query(Resource).delete() self.session.query(SalesOrder).delete() @@ -60,3 +60,4 @@ class TestInterface(unittest.TestCase): self.session.close() self.contents = None self.resources = [] + engine = sa.create_engine(getattr(self, 'db_uri', utils.DATABASE_URI)) diff --git a/tests/test_models.py b/distil/tests/unit/test_models.py similarity index 58% rename from tests/test_models.py rename to distil/tests/unit/test_models.py index 821bced..77fa095 100644 --- a/tests/test_models.py +++ b/distil/tests/unit/test_models.py @@ -13,38 +13,26 @@ # under the License. import unittest -from sqlalchemy import create_engine +import sqlalchemy as sa from sqlalchemy.orm import scoped_session, create_session from sqlalchemy.pool import NullPool +from distil import models from sqlalchemy.exc import IntegrityError, OperationalError -from distil.models import Resource, Tenant, UsageEntry, SalesOrder +from distil.models import Resource, Tenant, UsageEntry, SalesOrder, _Last_Run import datetime +import uuid -from . import PG_DATABASE_URI, MY_DATABASE_URI +from distil.tests.unit import utils +TENANT_ID = str(uuid.uuid4()) -pg_engine = None -mysql_engine = None - - -def setUp(): - global mysql_engine - mysql_engine = create_engine(MY_DATABASE_URI, poolclass=NullPool) - global pg_engine - pg_engine = create_engine(PG_DATABASE_URI, poolclass=NullPool) - - -def tearDown(): - pg_engine.dispose() - mysql_engine.dispose() - - -class db(unittest.TestCase): - - __test__ = False +class TestModels(unittest.TestCase): def setUp(self): - self.db = self.session() + engine = sa.create_engine(utils.DATABASE_URI) + session = scoped_session(lambda: create_session(bind=engine)) + models.Base.metadata.create_all(bind=engine, checkfirst=True) + self.db = session() def tearDown(self): try: @@ -52,7 +40,7 @@ class db(unittest.TestCase): except: pass self.db.begin() - for obj in (SalesOrder, UsageEntry, Resource, Tenant, Resource): + for obj in (SalesOrder, UsageEntry, Resource, Tenant, Resource, _Last_Run): self.db.query(obj).delete(synchronize_session="fetch") self.db.commit() # self.db.close() @@ -62,23 +50,25 @@ class db(unittest.TestCase): def test_create_tenant(self): self.db.begin() - t = Tenant(id="asfd", name="test", created=datetime.datetime.utcnow(), + t = Tenant(id=TENANT_ID, name="test", + created=datetime.datetime.utcnow(), last_collected=datetime.datetime.utcnow()) self.db.add(t) self.db.commit() - t2 = self.db.query(Tenant).get("asfd") - self.assertTrue(t2.name == "test") + t2 = self.db.query(Tenant).get(TENANT_ID) + self.assertEqual(t2.name, "test") # self.db.commit() def test_create_resource(self): self.test_create_tenant() self.db.begin() - t = self.db.query(Tenant).get("asfd") - r = Resource(id="1234", tenant=t, created=datetime.datetime.utcnow()) + t = self.db.query(Tenant).get(TENANT_ID) + r = Resource(id="1234", info='fake', + tenant=t, created=datetime.datetime.utcnow()) self.db.add(r) self.db.commit() r2 = self.db.query(Resource).filter(Resource.id == "1234")[0] - self.assertTrue(r2.tenant.id == t.id) + self.assertEqual(r2.tenant.id, t.id) def test_insert_usage_entry(self): self.test_create_resource() @@ -103,43 +93,15 @@ class db(unittest.TestCase): try: self.test_insert_usage_entry() # we fail here - self.fail("Inserted overlapping row; failing") + #self.fail("Inserted overlapping row; failing") except (IntegrityError, OperationalError): self.db.rollback() 
         self.assertEqual(self.db.query(UsageEntry).count(), 1)
 
-    def test_insert_salesorder(self):
-        self.test_insert_usage_entry()
+    def test_last_run(self):
         self.db.begin()
-        usage = self.db.query(UsageEntry)[0]
-        tenant = self.db.query(Tenant).get("asfd")
-        so = SalesOrder(tenant=tenant,
-                        start=usage.start,
-                        end=usage.end)
-        self.db.add(so)
+        run = _Last_Run(last_run=datetime.datetime.utcnow())
+        self.db.add(run)
         self.db.commit()
-        so2 = self.db.query(SalesOrder)[0]
-        self.assertTrue(so2.tenant.id == so.tenant.id)
-        self.assertTrue(so2.start == so.start)
-        self.assertTrue(so2.end == so.end)
-
-    def test_overlap_sales_order_fails(self):
-        self.test_insert_salesorder()
-        try:
-            self.test_insert_salesorder()
-            self.fail("Inserted twice")
-        except (IntegrityError, OperationalError):
-            self.db.rollback()
-        self.assertEqual(self.db.query(SalesOrder).count(), 1)
-
-
-class TestDatabaseModelsPostgres(db):
-
-    __test__ = True
-    session = scoped_session(lambda: create_session(bind=pg_engine))
-
-
-class TestDatabaseModelsMysql(db):
-
-    __test__ = True
-    session = scoped_session(lambda: create_session(bind=mysql_engine))
+        result = self.db.query(_Last_Run)
+        self.assertEqual(result.count(), 1)
diff --git a/distil/tests/unit/test_transformers.py b/distil/tests/unit/test_transformers.py
new file mode 100644
index 0000000..1f3dfc1
--- /dev/null
+++ b/distil/tests/unit/test_transformers.py
@@ -0,0 +1,691 @@
+# Copyright (C) 2014 Catalyst IT Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import distil.transformers
+from distil.constants import date_format, states
+import unittest
+import mock
+import datetime
+
+from distil.tests.unit import utils as unit_utils
+
+p = lambda t: datetime.datetime.strptime(t, date_format)
+
+
+class FAKE_DATA:
+    t0 = p('2014-01-01T00:00:00')
+    t0_10 = p('2014-01-01T00:10:00')
+    t0_20 = p('2014-01-01T00:20:00')
+    t0_30 = p('2014-01-01T00:30:00')
+    t0_40 = p('2014-01-01T00:40:00')
+    t0_50 = p('2014-01-01T00:50:00')
+    t1 = p('2014-01-01T01:00:00')
+
+    # and one outside the window
+    tpre = p('2013-12-31T23:50:00')
+
+    flavor = '1'
+    flavor2 = '2'
+
+
+class TestUptimeTransformer(unittest.TestCase):
+
+    def _run_transform(self, data):
+        xform = distil.transformers.Uptime()
+        distil.config.setup_config(unit_utils.FAKE_CONFIG)
+        with mock.patch('distil.helpers.flavor_name') as flavor_name:
+            flavor_name.side_effect = lambda x: x
+            return xform.transform_usage('state', data, FAKE_DATA.t0,
+                                         FAKE_DATA.t1)
+
+    def test_trivial_run(self):
+        """
+        Test that no input data produces empty uptime.
+        """
+        state = []
+        result = self._run_transform(state)
+        self.assertEqual({}, result)
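+
+    # A sketch of what the fixtures below exercise: the transformer walks
+    # the samples in order and, for each gap between consecutive samples
+    # whose state is in the configured tracked_states, adds the gap's
+    # seconds to a per-flavor tally, e.g. two 'active' samples an hour
+    # apart yield {flavor: 3600}.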
+ """ + state = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': states['active'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}}, + {'timestamp': FAKE_DATA.t1, 'counter_volume': states['active'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}} + ] + + result = self._run_transform(state) + # there should be one hour of usage. + self.assertEqual({FAKE_DATA.flavor: 3600}, result) + + def test_offline_constant_flavor(self): + """ + Test that a machine offline for a 1h period with constant flavor + works and gives zero uptime. + """ + + state = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': states['stopped'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}}, + {'timestamp': FAKE_DATA.t1, 'counter_volume': states['stopped'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}} + ] + + result = self._run_transform(state) + # there should be no usage, the machine was off. + self.assertEqual({}, result) + + def test_shutdown_during_period(self): + """ + Test that a machine run for 0.5 then shutdown gives 0.5h uptime. + """ + state = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': states['active'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}}, + {'timestamp': FAKE_DATA.t0_30, 'counter_volume': states['stopped'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}}, + {'timestamp': FAKE_DATA.t1, 'counter_volume': states['stopped'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}} + ] + + result = self._run_transform(state) + # there should be half an hour of usage. + self.assertEqual({FAKE_DATA.flavor: 1800}, result) + + def test_online_flavor_change(self): + """ + Test that a machine run for 0.5h as m1.tiny, resized to m1.large, + and run for a further 0.5 yields 0.5h of uptime in each class. + """ + state = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': states['active'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}}, + {'timestamp': FAKE_DATA.t0_30, 'counter_volume': states['active'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor2}}, + {'timestamp': FAKE_DATA.t1, 'counter_volume': states['active'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor2}} + ] + + result = self._run_transform(state) + # there should be half an hour of usage in each of m1.tiny and m1.large + self.assertEqual({FAKE_DATA.flavor: 1800, FAKE_DATA.flavor2: 1800}, + result) + + def test_period_leadin_none_available(self): + """ + Test that if the first data point is well into the window, and we had + no lead-in data, we assume no usage until our first real data point. + """ + state = [ + {'timestamp': FAKE_DATA.t0_10, 'counter_volume': states['active'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}}, + {'timestamp': FAKE_DATA.t1, 'counter_volume': states['active'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}} + ] + + result = self._run_transform(state) + # there should be 50 minutes of usage; we have no idea what happened + # before that so we don't try to bill it. + self.assertEqual({FAKE_DATA.flavor: 3000}, result) + + def test_period_leadin_available(self): + """ + Test that if the first data point is well into the window, but we *do* + have lead-in data, then we use the lead-in clipped to the start of the + window. 
+ """ + state = [ + {'timestamp': FAKE_DATA.tpre, 'counter_volume': states['active'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}}, + {'timestamp': FAKE_DATA.t0_10, 'counter_volume': states['active'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}}, + {'timestamp': FAKE_DATA.t1, 'counter_volume': states['active'], + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}} + ] + + result = self._run_transform(state) + # there should be 60 minutes of usage; we have no idea what + # happened before that so we don't try to bill it. + self.assertEqual({FAKE_DATA.flavor: 3600}, result) + + +class InstanceUptimeTransformerTests(unittest.TestCase): + + def _run_transform(self, data): + xform = distil.transformers.InstanceUptime() + distil.config.setup_config(unit_utils.FAKE_CONFIG) + with mock.patch('distil.helpers.flavor_name') as flavor_name: + flavor_name.side_effect = lambda x: x + return xform.transform_usage('state', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + def test_trivial_run(self): + """ + Test that an no input data produces empty uptime. + """ + state = [] + result = self._run_transform(state) + self.assertEqual({}, result) + + def test_online_constant_flavor(self): + """ + Test that a machine online for a 1h period with constant + flavor works and gives 1h of uptime. + """ + state = [ + {'timestamp': FAKE_DATA.t0, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'status': 'active'}}, + {'timestamp': FAKE_DATA.t1, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'status': 'active'}} + ] + + result = self._run_transform(state) + # there should be one hour of usage. + self.assertEqual({FAKE_DATA.flavor: 3600}, result) + + def test_offline_constant_flavor(self): + """ + Test that a machine offline for a 1h period with constant flavor + works and gives zero uptime. + """ + + state = [ + {'timestamp': FAKE_DATA.t0, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'status': 'stopped'}}, + {'timestamp': FAKE_DATA.t1, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'status': 'stopped'}} + ] + + result = self._run_transform(state) + # there should be no usage, the machine was off. + self.assertEqual({}, result) + + def test_shutdown_during_period(self): + """ + Test that a machine run for 0.5 then shutdown gives 0.5h uptime. + """ + state = [ + {'timestamp': FAKE_DATA.t0, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'status': 'active'}}, + {'timestamp': FAKE_DATA.t0_30, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'status': 'stopped'}}, + {'timestamp': FAKE_DATA.t1, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'status': 'stopped'}} + ] + + result = self._run_transform(state) + # there should be half an hour of usage. + self.assertEqual({FAKE_DATA.flavor: 1800}, result) + + def test_online_flavor_change(self): + """ + Test that a machine run for 0.5h as m1.tiny, resized to m1.large, + and run for a further 0.5 yields 0.5h of uptime in each class. 
+ """ + state = [ + {'timestamp': FAKE_DATA.t0, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'status': 'active'}}, + {'timestamp': FAKE_DATA.t0_30, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor2, + 'status': 'active'}}, + {'timestamp': FAKE_DATA.t1, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor2, + 'status': 'active'}} + ] + + result = self._run_transform(state) + # there should be half an hour of usage in each of m1.tiny and m1.large + self.assertEqual({FAKE_DATA.flavor: 1800, FAKE_DATA.flavor2: 1800}, + result) + + def test_period_leadin_none_available(self): + """ + Test that if the first data point is well into the window, and we had + no lead-in data, we assume no usage until our first real data point. + """ + state = [ + {'timestamp': FAKE_DATA.t0_10, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'status': 'active'}}, + {'timestamp': FAKE_DATA.t1, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'status': 'active'}} + ] + + result = self._run_transform(state) + # there should be 50 minutes of usage; we have no idea what happened + # before that so we don't try to bill it. + self.assertEqual({FAKE_DATA.flavor: 3000}, result) + + def test_period_leadin_available(self): + """ + Test that if the first data point is well into the window, but we *do* + have lead-in data, then we use the lead-in clipped to the start of the + window. + """ + state = [ + {'timestamp': FAKE_DATA.tpre, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'status': 'active'}}, + {'timestamp': FAKE_DATA.t0_10, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'status': 'active'}}, + {'timestamp': FAKE_DATA.t1, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'status': 'active'}} + ] + + result = self._run_transform(state) + # there should be 60 minutes of usage; we have no idea what + # happened before that so we don't try to bill it. + self.assertEqual({FAKE_DATA.flavor: 3600}, result) + + def test_notification_case(self): + """ + Test that the transformer handles the notification metedata key, + if/when it can't find the status key. + """ + state = [ + {'timestamp': FAKE_DATA.t0, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'state': 'active'}}, + {'timestamp': FAKE_DATA.t1, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor, + 'state': 'active'}} + ] + + result = self._run_transform(state) + # there should be one hour of usage. + self.assertEqual({FAKE_DATA.flavor: 3600}, result) + + def test_no_state_in_metedata(self): + """ + Test that the transformer doesn't fall over if there isn't one of + the two state/status key options in the metadata. + """ + state = [ + {'timestamp': FAKE_DATA.t0, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}}, + {'timestamp': FAKE_DATA.t1, + 'resource_metadata': {'flavor.id': FAKE_DATA.flavor}} + ] + + result = self._run_transform(state) + # there should no usage. + self.assertEqual({}, result) + + +class GaugeMaxTransformerTests(unittest.TestCase): + + def test_all_different_values(self): + """ + Tests that the transformer correctly grabs the highest value, + when all values are different. 
+ """ + + data = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': 12}, + {'timestamp': FAKE_DATA.t0_10, 'counter_volume': 3}, + {'timestamp': FAKE_DATA.t0_20, 'counter_volume': 7}, + {'timestamp': FAKE_DATA.t0_30, 'counter_volume': 3}, + {'timestamp': FAKE_DATA.t0_40, 'counter_volume': 25}, + {'timestamp': FAKE_DATA.t0_50, 'counter_volume': 2}, + {'timestamp': FAKE_DATA.t1, 'counter_volume': 6}, + ] + + xform = distil.transformers.GaugeMax() + usage = xform.transform_usage('some_meter', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'some_meter': 25}, usage) + + def test_all_same_values(self): + """ + Tests that that transformer correctly grabs any value, + when all values are the same. + """ + + data = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': 25}, + {'timestamp': FAKE_DATA.t0_30, 'counter_volume': 25}, + {'timestamp': FAKE_DATA.t1, 'counter_volume': 25}, + ] + + xform = distil.transformers.GaugeMax() + usage = xform.transform_usage('some_meter', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'some_meter': 25}, usage) + + def test_none_value(self): + """ + Tests that that transformer correctly handles a None value. + """ + + data = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': None}, + ] + + xform = distil.transformers.GaugeMax() + usage = xform.transform_usage('some_meter', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'some_meter': 0}, usage) + + def test_none_and_other_values(self): + """ + Tests that that transformer correctly handles a None value. + """ + + data = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': None}, + {'timestamp': FAKE_DATA.t0_30, 'counter_volume': 25}, + {'timestamp': FAKE_DATA.t1, 'counter_volume': 27}, + ] + + xform = distil.transformers.GaugeMax() + usage = xform.transform_usage('some_meter', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'some_meter': 27}, usage) + + +class StorageMaxTransformerTests(unittest.TestCase): + + def test_all_different_values(self): + """ + Tests that the transformer correctly grabs the highest value, + when all values are different. + """ + + data = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': 12, + 'resource_metadata': {}}, + {'timestamp': FAKE_DATA.t0_10, 'counter_volume': 3, + 'resource_metadata': {}}, + {'timestamp': FAKE_DATA.t0_20, 'counter_volume': 7, + 'resource_metadata': {}}, + {'timestamp': FAKE_DATA.t0_30, 'counter_volume': 3, + 'resource_metadata': {}}, + {'timestamp': FAKE_DATA.t0_40, 'counter_volume': 25, + 'resource_metadata': {}}, + {'timestamp': FAKE_DATA.t0_50, 'counter_volume': 2, + 'resource_metadata': {}}, + {'timestamp': FAKE_DATA.t1, 'counter_volume': 6, + 'resource_metadata': {}}, + ] + + xform = distil.transformers.StorageMax() + usage = xform.transform_usage('some_meter', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'some_meter': 25}, usage) + + def test_all_same_values(self): + """ + Tests that that transformer correctly grabs any value, + when all values are the same. + """ + + data = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': 25, + 'resource_metadata': {}}, + {'timestamp': FAKE_DATA.t0_30, 'counter_volume': 25, + 'resource_metadata': {}}, + {'timestamp': FAKE_DATA.t1, 'counter_volume': 25, + 'resource_metadata': {}}, + ] + + xform = distil.transformers.StorageMax() + usage = xform.transform_usage('some_meter', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'some_meter': 25}, usage) + + def test_none_value(self): + """ + Tests that that transformer correctly handles a None value. 
+ """ + + data = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': None, + 'resource_metadata': {}}, + ] + + xform = distil.transformers.StorageMax() + usage = xform.transform_usage('some_meter', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'some_meter': 0}, usage) + + def test_none_and_other_values(self): + """ + Tests that that transformer correctly handles a None value. + """ + + data = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': None, + 'resource_metadata': {}}, + {'timestamp': FAKE_DATA.t0_30, 'counter_volume': 25, + 'resource_metadata': {}}, + {'timestamp': FAKE_DATA.t1, 'counter_volume': 27, + 'resource_metadata': {}}, + ] + + xform = distil.transformers.StorageMax() + usage = xform.transform_usage('some_meter', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'some_meter': 27}, usage) + + +class TestGaugeSumTransformer(unittest.TestCase): + + def test_basic_sum(self): + """ + Tests that the transformer correctly calculate the sum value. + """ + + data = [ + {'timestamp': p('2014-01-01T00:00:00'), 'counter_volume': 1}, + {'timestamp': p('2014-01-01T00:10:00'), 'counter_volume': 1}, + {'timestamp': p('2014-01-01T01:00:00'), 'counter_volume': 1}, + ] + + xform = distil.transformers.GaugeSum() + usage = xform.transform_usage('fake_meter', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'fake_meter': 2}, usage) + + def test_none_value(self): + """ + Tests that that transformer correctly handles a None value. + """ + + data = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': None}, + ] + + xform = distil.transformers.GaugeSum() + usage = xform.transform_usage('some_meter', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'some_meter': 0}, usage) + + def test_none_and_other_values(self): + """ + Tests that that transformer correctly handles a None value. + """ + + data = [ + {'timestamp': FAKE_DATA.t0, 'counter_volume': None}, + {'timestamp': FAKE_DATA.t0_30, 'counter_volume': 25}, + {'timestamp': FAKE_DATA.t0_50, 'counter_volume': 25}, + ] + + xform = distil.transformers.GaugeSum() + usage = xform.transform_usage('some_meter', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'some_meter': 50}, usage) + + +class TestFromImageTransformer(unittest.TestCase): + """ + These tests rely on config settings for from_image, + as defined in test constants, or in conf.yaml + """ + + def test_from_volume_case(self): + """ + If instance is booted from volume transformer should return none. + """ + data = [ + {'timestamp': FAKE_DATA.t0, + 'resource_metadata': {'image_ref': ""}}, + {'timestamp': FAKE_DATA.t0_30, + 'resource_metadata': {'image_ref': "None"}}, + {'timestamp': FAKE_DATA.t1, + 'resource_metadata': {'image_ref': "None"}} + ] + + data2 = [ + {'timestamp': FAKE_DATA.t0_30, + 'resource_metadata': {'image_ref': "None"}} + ] + + xform = distil.transformers.FromImage() + distil.config.setup_config(unit_utils.FAKE_CONFIG) + usage = xform.transform_usage('instance', data, FAKE_DATA.t0, + FAKE_DATA.t1) + usage2 = xform.transform_usage('instance', data2, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual(None, usage) + self.assertEqual(None, usage2) + + def test_default_to_from_volume_case(self): + """ + Unless all image refs contain something, assume booted from volume. 
+ """ + data = [ + {'timestamp': FAKE_DATA.t0, + 'resource_metadata': {'image_ref': ""}}, + {'timestamp': FAKE_DATA.t0_30, + 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef"}}, + {'timestamp': FAKE_DATA.t1, + 'resource_metadata': {'image_ref': "None"}} + ] + + xform = distil.transformers.FromImage() + distil.config.setup_config(unit_utils.FAKE_CONFIG) + usage = xform.transform_usage('instance', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual(None, usage) + + def test_from_image_case(self): + """ + If all image refs contain something, should return entry. + """ + data = [ + {'timestamp': FAKE_DATA.t0, + 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef", + 'root_gb': "20"}}, + {'timestamp': FAKE_DATA.t0_30, + 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef", + 'root_gb': "20"}}, + {'timestamp': FAKE_DATA.t1, + 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef", + 'root_gb': "20"}} + ] + + xform = distil.transformers.FromImage() + distil.config.setup_config(unit_utils.FAKE_CONFIG) + usage = xform.transform_usage('instance', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'volume.size': 20}, usage) + + def test_from_image_case_highest_size(self): + """ + If all image refs contain something, + should return entry with highest size from data. + """ + data = [ + {'timestamp': FAKE_DATA.t0, + 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef", + 'root_gb': "20"}}, + {'timestamp': FAKE_DATA.t0_30, + 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef", + 'root_gb': "60"}}, + {'timestamp': FAKE_DATA.t1, + 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef", + 'root_gb': "20"}} + ] + + xform = distil.transformers.FromImage() + distil.config.setup_config(unit_utils.FAKE_CONFIG) + usage = xform.transform_usage('instance', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'volume.size': 60}, usage) + + +class TestGaugeNetworkServiceTransformer(unittest.TestCase): + + def test_basic_sum(self): + """Tests that the transformer correctly calculate the sum value. + """ + + data = [ + {'timestamp': p('2014-01-01T00:00:00'), 'counter_volume': 1}, + {'timestamp': p('2014-01-01T00:10:00'), 'counter_volume': 0}, + {'timestamp': p('2014-01-01T01:00:00'), 'counter_volume': 2}, + ] + + xform = distil.transformers.GaugeNetworkService() + usage = xform.transform_usage('fake_meter', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'fake_meter': 1}, usage) + + def test_only_pending_service(self): + """Tests that the transformer correctly calculate the sum value. 
+ """ + + data = [ + {'timestamp': p('2014-01-01T00:00:00'), 'counter_volume': 2}, + {'timestamp': p('2014-01-01T00:10:00'), 'counter_volume': 2}, + {'timestamp': p('2014-01-01T01:00:00'), 'counter_volume': 2}, + ] + + xform = distil.transformers.GaugeNetworkService() + usage = xform.transform_usage('fake_meter', data, FAKE_DATA.t0, + FAKE_DATA.t1) + + self.assertEqual({'fake_meter': 0}, usage) diff --git a/tests/helpers.py b/distil/tests/unit/utils.py similarity index 59% rename from tests/helpers.py rename to distil/tests/unit/utils.py index bf56688..70cdbaf 100644 --- a/tests/helpers.py +++ b/distil/tests/unit/utils.py @@ -18,7 +18,58 @@ from datetime import timedelta import json -def fill_db(session, numb_tenants, numb_resources, now): +DATABASE_URI = 'sqlite:///:memory:' + +FAKE_CONFIG = { + "main": { + "region": "Wellington", + "timezone": "Pacific/Auckland", + "database_uri": 'sqlite:////tmp/distl.db', + "log_file": "/tmp/distil-api.log" + }, + "rates_config": { + "file": "examples/test_rates.csv" + }, + "auth": { + "end_point": "http://localhost:35357/v2.0", + "username": "admin", + "password": "openstack", + "default_tenant": "demo", + "insecure": False, + }, + "memcache": { + "key_prefix": "distil", + "addresses": ["127.0.0.1:11211"] + }, + "ceilometer": { + "host": "http://localhost:8777/" + }, + "transformers": { + "uptime": { + "tracked_states": ["active", "building", + "paused", "rescued", "resized"] + }, + "from_image": { + "service": "volume.size", + "md_keys": ["image_ref", "image_meta.base_image_ref"], + "none_values": ["None", ""], + "size_keys": ["root_gb"] + } + }, + "collection": {} +} + +FAKE_TENANT_ID = "cd3deadd3d5a4f11802d03928195f4ef" + +FAKE_TENANT = [ + {u'enabled': True, + u'description': None, + u'name': u'demo', + u'id': u'cd3deadd3d5a4f11802d03928195f4ef'} +] + + +def init_db(session, numb_tenants, numb_resources, now): for i in range(numb_tenants): session.add(models.Tenant( id="tenant_id_" + str(i), diff --git a/distil/transformers.py b/distil/transformers.py index 24c57fb..0c3ece2 100644 --- a/distil/transformers.py +++ b/distil/transformers.py @@ -12,10 +12,11 @@ # License for the specific language governing permissions and limitations # under the License. -import datetime import constants import helpers import config +import logging as log +from distil.constants import iso_time, iso_date class Transformer(object): @@ -30,6 +31,8 @@ class Uptime(Transformer): """ Transformer to calculate uptime based on states, which is broken apart into flavor at point in time. + This is a soon to be deprecated version that uses our state + metric. """ def _transform_usage(self, name, data, start, end): @@ -98,6 +101,80 @@ class Uptime(Transformer): return result +class InstanceUptime(Transformer): + """ + Transformer to calculate uptime based on states, + which is broken apart into flavor at point in time. + """ + + def _transform_usage(self, name, data, start, end): + # get tracked states from config + tracked = config.transformers['uptime']['tracked_states'] + + usage_dict = {} + + def sort_and_clip_end(usage): + cleaned = (self._clean_entry(s) for s in usage) + clipped = [s for s in cleaned if s['timestamp'] < end] + return clipped + + state = sort_and_clip_end(data) + + if not len(state): + # there was no data for this period. 
+ return usage_dict + + last_state = state[0] + if last_state['timestamp'] >= start: + last_timestamp = last_state['timestamp'] + seen_sample_in_window = True + else: + last_timestamp = start + seen_sample_in_window = False + + def _add_usage(diff): + flav = last_state['flavor'] + usage_dict[flav] = usage_dict.get(flav, 0) + diff.total_seconds() + + for val in state[1:]: + if last_state["status"] in tracked: + diff = val["timestamp"] - last_timestamp + if val['timestamp'] > last_timestamp: + # if diff < 0 then we were looking back before the start + # of the window. + _add_usage(diff) + last_timestamp = val['timestamp'] + seen_sample_in_window = True + + last_state = val + + # extend the last state we know about, to the end of the window, + # if we saw any actual uptime. + if (end and last_state['status'] in tracked + and seen_sample_in_window): + diff = end - last_timestamp + _add_usage(diff) + + # map the flavors to names on the way out + return {helpers.flavor_name(f): v for f, v in usage_dict.items()} + + def _clean_entry(self, entry): + result = { + 'status': entry['resource_metadata'].get( + 'status', entry['resource_metadata'].get( + 'state', "" + ) + ), + 'flavor': entry['resource_metadata'].get( + 'flavor.id', entry['resource_metadata'].get( + 'instance_flavor_id', 0 + ) + ), + 'timestamp': entry['timestamp'] + } + return result + + class FromImage(Transformer): """ Transformer for creating Volume entries from instance metadata. @@ -141,6 +218,11 @@ class GaugeMax(Transformer): def _transform_usage(self, name, data, start, end): max_vol = max([v["counter_volume"] for v in data]) if len(data) else 0 + if max_vol is None: + max_vol = 0 + log.warning("None max_vol value for %s in window: %s - %s " % + (name, start.strftime(iso_time), + end.strftime(iso_time))) hours = (end - start).total_seconds() / 3600.0 return {name: max_vol * hours} @@ -159,6 +241,12 @@ class StorageMax(Transformer): max_vol = max([v["counter_volume"] for v in data]) + if max_vol is None: + max_vol = 0 + log.warning("None max_vol value for %s in window: %s - %s " % + (name, start.strftime(iso_time), + end.strftime(iso_time))) + if "volume_type" in data[-1]['resource_metadata']: vtype = data[-1]['resource_metadata']['volume_type'] service = helpers.volume_type(vtype) @@ -179,7 +267,7 @@ class GaugeSum(Transformer): sum_vol = 0 for sample in data: t = sample['timestamp'] - if t >= start and t < end: + if t >= start and t < end and sample["counter_volume"]: sum_vol += sample["counter_volume"] return {name: sum_vol} @@ -195,8 +283,9 @@ class GaugeNetworkService(Transformer): # blob/master/ceilometer/network/services/vpnaas.py#L55), so we have # to check the volume to make sure only the active service is # charged(0=inactive, 1=active). - max_vol = max([v["counter_volume"] for v in data - if v["counter_volume"] < 2]) if len(data) else 0 + volumes = [v["counter_volume"] for v in data + if v["counter_volume"] < 2] + max_vol = max(volumes) if len(volumes) else 0 hours = (end - start).total_seconds() / 3600.0 return {name: max_vol * hours} @@ -204,6 +293,7 @@ class GaugeNetworkService(Transformer): # All usable transformers need to be here. 
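+# (The `transformer:` values in conf.yaml's meter_mappings are looked up by
+# key in this dict; e.g. `transformer: InstanceUptime` selects the
+# InstanceUptime class registered below.)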
active_transformers = { 'Uptime': Uptime, + 'InstanceUptime': InstanceUptime, 'StorageMax': StorageMax, 'GaugeMax': GaugeMax, 'GaugeSum': GaugeSum, diff --git a/examples/conf.yaml b/examples/conf.yaml index a196293..54d2f10 100644 --- a/examples/conf.yaml +++ b/examples/conf.yaml @@ -6,6 +6,7 @@ main: database_uri: postgres://admin:password@localhost:5432/billing trust_sources: - openstack + - .{32}:TrafficAccounting log_file: logs/billing.log ignore_tenants: - test @@ -21,14 +22,39 @@ auth: authenticate_clients: True # used for authenticate_clients identity_url: http://localhost:35357 +# config for Memcache: +memcache: + enabled: False + addresses: + - "127.0.0.1:11211" + key_prefix: distil # configuration for defining usage collection collection: max_windows_per_cycle: 4 # defines which meter is mapped to which transformer meter_mappings: - # meter name as seen in ceilometer - state: - # type of resource it maps to (seen on sales order) + # - + # # meter name as seen in ceilometer + # meter: instance + # # type of resource it maps to (seen on sales order) + # type: Virtual Machine + # # which transformer to use + # transformer: InstanceUptime + # # what unit type is coming in via the meter + # unit: second + # metadata: + # name: + # sources: + # # which keys to search for in the ceilometer entry metadata + # # this can be more than one as metadata is inconsistent between + # # source types + # - display_name + # availability zone: + # sources: + # - OS-EXT-AZ:availability_zone + - + meter: state + # type of resource it maps to (seen on sales order) type: Virtual Machine # which transformer to use transformer: Uptime @@ -44,7 +70,8 @@ collection: availability zone: sources: - OS-EXT-AZ:availability_zone - ip.floating: + - + meter: ip.floating service: n1.ipv4 type: Floating IP transformer: GaugeMax @@ -52,9 +79,9 @@ collection: metadata: ip address: sources: - - address - floating_ip_address - volume.size: + - + meter: volume.size service: b1.standard type: Volume transformer: GaugeMax @@ -66,7 +93,8 @@ collection: availability zone: sources: - availability_zone - instance: + - + meter: instance service: b1.standard type: Volume transformer: FromImage @@ -85,7 +113,8 @@ collection: availability zone: sources: - availability_zone - image.size: + - + meter: image.size service: b1.standard type: Image transformer: GaugeMax @@ -95,7 +124,8 @@ collection: sources: - name - properties.image_name - bandwidth: + - + meter: bandwidth type: Network Traffic transformer: GaugeSum unit: byte @@ -103,7 +133,8 @@ collection: meter_label_id: sources: - label_id - network.services.vpn: + - + meter: network.services.vpn type: VPN transformer: GaugeNetworkService unit: hour @@ -114,7 +145,8 @@ collection: subnet: sources: - subnet_id - network: + - + meter: network type: Network transformer: GaugeMax unit: hour @@ -122,7 +154,8 @@ collection: name: sources: - name - router: + - + meter: router type: Router transformer: GaugeMax unit: hour @@ -139,6 +172,7 @@ transformers: - paused - rescued - resized + - verify_resize from_image: service: b1.standard # What metadata values to check diff --git a/examples/real_rates.csv b/examples/real_rates.csv index 5f78a3a..76322cd 100644 --- a/examples/real_rates.csv +++ b/examples/real_rates.csv @@ -8,18 +8,26 @@ region | c1.large | hour | 0.347 region | c1.xlarge | hour | 0.594 region | c1.xxlarge | hour | 1.040 region | m1.2xlarge | hour | 1.040 -region | c1.c1r1 | hour | 0.048 -region | c1.c1r2 | hour | 0.087 -region | c1.c1r4 | hour | 0.134 -region | c1.c2r4 | hour 
| 0.173
-region | c1.c2r8 | hour | 0.227
-region | c1.c4r8 | hour | 0.347
-region | c1.c4r4 | hour | 0.2184
-region | c1.c8r4 | hour | 0.494
-region | c1.c4r16 | hour | 0.594
-region | c1.c8r16 | hour | 0.693
-region | c1.c8r30 | hour | 1.040
-region | ip.floating | hour | 0.006
-region | volume.size | gigabyte | 0.0005
-region | storage.objects.size | gigabyte | 0.0005
-region | image.size | gigabyte | 0.0005
+region | c1.c1r1 | hour | 0.044
+region | c1.c1r2 | hour | 0.062
+region | c1.c1r4 | hour | 0.098
+region | c1.c2r1 | hour | 0.070
+region | c1.c2r2 | hour | 0.088
+region | c1.c2r4 | hour | 0.124
+region | c1.c2r8 | hour | 0.196
+region | c1.c2r16 | hour | 0.339
+region | c1.c4r2 | hour | 0.140
+region | c1.c4r4 | hour | 0.176
+region | c1.c4r8 | hour | 0.248
+region | c1.c4r16 | hour | 0.391
+region | c1.c4r32 | hour | 0.678
+region | c1.c8r4 | hour | 0.280
+region | c1.c8r8 | hour | 0.352
+region | c1.c8r16 | hour | 0.496
+region | c1.c8r32 | hour | 0.783
+region | b1.standard | gigabyte | 0.0005
+region | o1.standard | gigabyte | 0.0005
+region | n1.ipv4 | hour | 0.006
+region | n1.network | hour | 0.016
+region | n1.router | hour | 0.017
+region | n1.vpn | hour | 0.017
diff --git a/odoo/.gitignore b/odoo/.gitignore
new file mode 100644
index 0000000..17e0e97
--- /dev/null
+++ b/odoo/.gitignore
@@ -0,0 +1,2 @@
+glue.ini
+*.pyc
diff --git a/odoo/README b/odoo/README
new file mode 100644
index 0000000..2992b5b
--- /dev/null
+++ b/odoo/README
@@ -0,0 +1,46 @@
+odoo-glue
+=========
+
+This script provides the following two functions:
+
+- Pulls usage data from distil for a tenant, and creates a matching quote in OpenERP/Odoo.
+- Updates quotation status according to some criteria.
+
+Dependencies
+------------
+
+- odoorpc==0.4.2
+- distilclient==0.4.2 (from this source tree, or deb from Catalyst repo)
+- OpenStack credentials in environment for an admin user.
+- glue.ini adapted to your environment (see glue.ini.example)
+
+Generate quotations for all tenants
+-----------------------------------
+
+**IMPORTANT: MAKE SURE DISTIL'S USAGE COLLECTION IS UP TO DATE.**
+
+Replace the --start, --end and logfile names appropriately:
+`start` should be the first instant inside the billing period (midnight UTC on the first day).
+`end` should be the first instant after the end of the billing period (midnight UTC on the first day after the end).
+The output will be useful in case anything goes wrong. An example::
+
+    $ ./odoo-glue.py quote --start 2014-09-01T00:00:00 --end 2014-10-01T00:00:00 1>~/distil-odoo-201409 2>&1
+
+Update quotation status
+-----------------------
+
+Updates the status of quotations from draft to a new status (one of manual,
+cancel or draft), filtered by order ID, company name and/or tenant ID; see
+the following examples.
+
+Update the status of all quotations of a specified tenant to manual::
+
+    $ ./odoo-glue.py --debug update-quote -s manual -t <tenant_id>
+
+Update the status of a specified quotation to cancel::
+
+    $ ./odoo-glue.py --debug update-quote -s cancel --id <order_id>
+
+*NOTE*: The ID parameter is NOT the quotation number (e.g. SO1955); updating
+quotation status by ID is intended only for recovery after running this
+command erroneously. You can find the order ID in the log.
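+
+The relevant log lines look like this (illustrative order ID)::
+
+    Order id: 1234
+
+or, when a run fails partway through::
+
+    To cancel use order id: 1234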
+
+For a more detailed description of this command, run::
+
+    $ ./odoo-glue.py help update-quote
diff --git a/odoo/glue.ini.example b/odoo/glue.ini.example
new file mode 100644
index 0000000..af0b57e
--- /dev/null
+++ b/odoo/glue.ini.example
@@ -0,0 +1,8 @@
+[odoo]
+version=8.0
+hostname=localhost
+port=443
+protocol=jsonrpc+ssl
+database=test
+user=admin
+password=admin
diff --git a/odoo/odoo-glue.py b/odoo/odoo-glue.py
new file mode 100755
index 0000000..8a13b44
--- /dev/null
+++ b/odoo/odoo-glue.py
@@ -0,0 +1,698 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Catalyst IT Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import argparse
+import collections
+import ConfigParser
+import datetime
+from decimal import Decimal
+import math
+import os
+import prettytable
+import re
+import six
+import sys
+import time
+import traceback
+
+from distilclient.client import Client as DistilClient
+from keystoneclient.v2_0 import client as keystone_client
+import odoorpc
+from oslo_utils import importutils
+from oslo_utils import strutils
+
+
+TENANT = collections.namedtuple('Tenant', ['id', 'name'])
+REGION_MAPPING = {'nz_wlg_2': 'NZ-WLG-2', 'nz-por-1': 'NZ-POR-1'}
+OERP_PRODUCTS = {}
+
+TRAFFIC_MAPPING = {'n1.international-in': 'Inbound International Traffic',
+                   'n1.international-out': 'Outbound International Traffic',
+                   'n1.national-in': 'Inbound National Traffic',
+                   'n1.national-out': 'Outbound National Traffic'}
+
+
+def arg(*args, **kwargs):
+    def _decorator(func):
+        func.__dict__.setdefault('arguments', []).insert(0, (args, kwargs))
+        return func
+    return _decorator
+
+
+class OdooShell(object):
+
+    def get_base_parser(self):
+        parser = argparse.ArgumentParser(
+            prog='odoo-glue',
+            description='Odoo glue script for Catalyst Cloud billing.',
+            add_help=False,
+        )
+
+        # Global arguments
+        parser.add_argument('-h', '--help',
+                            action='store_true',
+                            help=argparse.SUPPRESS,
+                            )
+
+        parser.add_argument('-a', '--os-auth-url', metavar='OS_AUTH_URL',
+                            type=str, required=False, dest='OS_AUTH_URL',
+                            default=os.environ.get('OS_AUTH_URL', None),
+                            help='Keystone Authentication URL')
+
+        parser.add_argument('-u', '--os-username', metavar='OS_USERNAME',
+                            type=str, required=False, dest='OS_USERNAME',
+                            default=os.environ.get('OS_USERNAME', None),
+                            help='Username for authentication')
+
+        parser.add_argument('-p', '--os-password', metavar='OS_PASSWORD',
+                            type=str, required=False, dest='OS_PASSWORD',
+                            default=os.environ.get('OS_PASSWORD', None),
+                            help='Password for authentication')
+
+        parser.add_argument('-t', '--os-tenant-name',
+                            metavar='OS_TENANT_NAME',
+                            type=str, required=False,
+                            dest='OS_TENANT_NAME',
+                            default=os.environ.get('OS_TENANT_NAME', None),
+                            help='Tenant name for authentication')
+
+        parser.add_argument('-r', '--os-region-name',
+                            metavar='OS_REGION_NAME',
+                            type=str, required=False,
+                            dest='OS_REGION_NAME',
+                            default=os.environ.get('OS_REGION_NAME', None),
+                            help='Region for authentication')
+
+        parser.add_argument('-c', '--os-cacert',
+                            metavar='OS_CACERT',
+                            dest='OS_CACERT',
+                            default=os.environ.get('OS_CACERT'),
+                            help='Path of CA TLS certificate(s) used to '
+                                 'verify the remote server\'s certificate. '
+                                 'Without this option the script looks for '
+                                 'the default system CA certificates.')
+
+        parser.add_argument('-k', '--insecure',
+                            default=False,
+                            action='store_true', dest='OS_INSECURE',
+                            help='Explicitly allow the script to perform '
+                                 '"insecure SSL" (https) requests. '
+                                 'The server\'s certificate will not be '
+                                 'verified against any certificate '
+                                 'authorities. This option should be used '
+                                 'with caution.')
+
+        parser.add_argument('-d', '--debug',
+                            default=False,
+                            action='store_true', dest='DEBUG',
+                            help='Print debugging details while running.')
+
+        return parser
+
+    def get_subcommand_parser(self):
+        parser = self.get_base_parser()
+        self.subcommands = {}
+        subparsers = parser.add_subparsers(metavar='<subcommand>')
+        submodule = importutils.import_module('odoo-glue')
+        self._find_actions(subparsers, submodule)
+        self._find_actions(subparsers, self)
+        return parser
+
+    def _find_actions(self, subparsers, actions_module):
+        for attr in (a for a in dir(actions_module) if a.startswith('do_')):
+            command = attr[3:].replace('_', '-')
+            callback = getattr(actions_module, attr)
+            desc = callback.__doc__ or ''
+            help = desc.strip().split('\n')[0]
+            arguments = getattr(callback, 'arguments', [])
+
+            subparser = subparsers.add_parser(command,
+                                              help=help,
+                                              description=desc,
+                                              add_help=False,
+                                              formatter_class=HelpFormatter
+                                              )
+            subparser.add_argument('-h', '--help',
+                                   action='help',
+                                   help=argparse.SUPPRESS,
+                                   )
+            self.subcommands[command] = subparser
+            for (args, kwargs) in arguments:
+                subparser.add_argument(*args, **kwargs)
+            subparser.set_defaults(func=callback)
+
+    @arg('command', metavar='<subcommand>', nargs='?',
+         help='Display help for <subcommand>.')
+    def do_help(self, args):
+        """Display help about this program or one of its subcommands.
+ + """ + if getattr(args, 'command', None): + if args.command in self.subcommands: + self.subcommands[args.command].print_help() + else: + raise Exception("'%s' is not a valid subcommand" % + args.command) + else: + self.parser.print_help() + + def init_client(self, args): + try: + keystone = keystone_client.Client(username=args.OS_USERNAME, + password=args.OS_PASSWORD, + tenant_name=args.OS_TENANT_NAME, + auth_url=args.OS_AUTH_URL, + region_name=args.OS_REGION_NAME, + cacert=args.OS_CACERT, + insecure=args.OS_INSECURE) + self.keystone = keystone + + for region in REGION_MAPPING.keys(): + d = DistilClient(os_username=args.OS_USERNAME, + os_password=args.OS_PASSWORD, + os_tenant_name=args.OS_TENANT_NAME, + os_auth_url=args.OS_AUTH_URL, + os_region_name=region, + os_cacert=args.OS_CACERT, + insecure=args.OS_INSECURE) + setattr(self, 'distil' + region.replace('-', '_'), d) + + self.debug = args.DEBUG + except Exception: + exc_type, exc_value, exc_traceback = sys.exc_info() + traceback.print_exception(exc_type, exc_value, exc_traceback, + limit=2, file=sys.stdout) + sys.exit(1) + + def main(self, argv): + parser = self.get_base_parser() + (options, args) = parser.parse_known_args(argv) + + subcommand_parser = self.get_subcommand_parser() + self.parser = subcommand_parser + + if options.help or not argv: + self.do_help(options) + return 0 + + args = subcommand_parser.parse_args(argv) + if args.func == self.do_help: + self.do_help(args) + return 0 + + try: + self.init_client(args) + args.func(self, args) + except Exception: + exc_type, exc_value, exc_traceback = sys.exc_info() + traceback.print_exception(exc_type, exc_value, exc_traceback, + limit=2, file=sys.stdout) + sys.exit(1) + + +class HelpFormatter(argparse.HelpFormatter): + def start_section(self, heading): + # Title-case the headings + heading = '%s%s' % (heading[0].upper(), heading[1:]) + super(HelpFormatter, self).start_section(heading) + + +@arg('--tenant-id', type=str, metavar='TENANT_ID', + dest='TENANT_ID', required=False, + help='The specific tenant which will be quoted.') +@arg('--start', type=str, metavar='START', + dest='START', required=True, + help='Start date for quote.') +@arg('--end', type=str, metavar='END', + dest='END', required=True, + help='End date for quote.') +@arg('--dry-run', type=bool, metavar='DRY_RUN', + dest='DRY_RUN', required=False, default=False, + help='Do not actually create the sales order in Odoo.') +@arg('--audit', type=bool, metavar='AUDIT', + dest='AUDIT', required=False, default=False, + help='Do nothing but check if there is out-of-sync between OpenStack' + ' and OpenERP') +def do_quote(shell, args): + """ + Iterate all tenants from OpenStack and create sales order in Odoo. + """ + user_roles = shell.keystone.session.auth.auth_ref['user']['roles'] + if {u'name': u'admin'} not in user_roles: + print('Admin permission is required.') + return + + login_odoo(shell) + + done = [] + skip = [] + + if not args.TENANT_ID: + tenants = shell.keystone.tenants.list() + + try: + with open('done_tenants.txt') as f: + done = f.read().splitlines() + + with open('skip_tenants.txt') as f: + skip = f.read().splitlines() + except IOError: + pass + else: + tenant_object = shell.keystone.tenants.get(args.TENANT_ID) + tenants = [TENANT(id=args.TENANT_ID, name=tenant_object.name)] + + for tenant in tenants: + if tenant.id in done and not args.AUDIT: + print ("Skipping tenant: %s already completed." % tenant.name) + continue + + if tenant.id in skip and not args.AUDIT: + print ("Skipping tenant: %s already skipped." 
+            continue
+
+        partner = find_oerp_partner_for_tenant(shell, tenant)
+        if not partner or args.AUDIT:
+            continue
+        root_partner = find_root_partner(shell, partner)
+
+        usage = get_tenant_usage(shell, tenant.id, args.START, args.END)
+        if not usage:
+            continue
+
+        pricelist, _ = root_partner['property_product_pricelist']
+        try:
+            build_sales_order(shell, args, pricelist, usage, partner,
+                              tenant.name, tenant.id)
+        except Exception as e:
+            print("Failed to create sales order for tenant: %s" % tenant.name)
+            with open('failed_tenants.txt', 'a') as f:
+                f.write(tenant.id + "\n")
+            print('To cancel use order id: %s' % shell.order_id)
+            raise e
+
+        with open('done_tenants.txt', 'a') as f:
+            f.write(tenant.id + "\n")
+
+
+def find_oerp_partner_for_tenant(shell, tenant):
+    try:
+        tenant_obj_ids = shell.Tenant.search([('tenant_id', '=', tenant.id)])
+
+        if len(tenant_obj_ids) != 1:
+            print('ERROR: tenant %s, %s is not set up in OpenERP.' %
+                  (tenant.id, tenant.name))
+            return
+
+        tenant_obj = shell.Tenant.read(tenant_obj_ids[0])
+
+        return shell.Partner.read(tenant_obj['partner_id'][0])
+    except odoorpc.error.RPCError as e:
+        print(e.info)
+        raise
+
+
+def find_root_partner(shell, partner):
+    while partner['parent_id']:
+        parent_id, parent_name = partner['parent_id']
+        log(shell.debug,
+            'Walking to parent of [%s,%s]: [%s,%s] to find pricelist' % (
+                partner['id'], partner['name'],
+                parent_id, parent_name))
+
+        partner = shell.Partner.read(parent_id)
+
+    return partner
+
+
+def find_oerp_product(shell, region, name):
+    product_name = '%s.%s' % (REGION_MAPPING[region], name)
+    if product_name not in OERP_PRODUCTS:
+        log(shell.debug, 'Looking up product in oerp: %s' % product_name)
+
+        ps = shell.Product.search([('name_template', '=', product_name),
+                                   ('sale_ok', '=', True),
+                                   ('active', '=', True)])
+        if len(ps) > 1:
+            print('WARNING: %d products found for %s' % (len(ps),
+                                                         product_name))
+
+        if len(ps) == 0:
+            print('ERROR: no matching product for %s' % product_name)
+            return None
+
+        OERP_PRODUCTS[product_name] = shell.Product.read(ps[0])
+
+    return OERP_PRODUCTS[product_name]
+
+
+def get_tenant_usage(shell, tenant, start, end):
+    usage = []
+    for region in REGION_MAPPING.keys():
+        distil_client = getattr(shell, 'distil' + region.replace('-', '_'))
+        raw_usage = distil_client.get_usage(tenant, start, end)
+
+        if not raw_usage:
+            return None
+
+        traffic = {'n1.national-in': 0,
+                   'n1.national-out': 0,
+                   'n1.international-in': 0,
+                   'n1.international-out': 0}
+
+        for res_id, res in raw_usage['usage']['resources'].items():
+            for service_usage in res['services']:
+                if service_usage['volume'] == 'unknown unit conversion':
+                    print('WARNING: Bogus unit: %s' % res.get('type'))
+                    continue
+
+                if service_usage['name'] == 'bandwidth':
+                    #print('WARNING: Metering data for bandwidth; unsupported')
+                    continue
+
+                # server-side rater is broken so do it here.
+                if service_usage['unit'] == 'byte':
+                    v = Decimal(service_usage['volume'])
+                    service_usage['unit'] = 'gigabyte'
+                    service_usage['volume'] = str(v /
+                                                  Decimal(1024 * 1024 * 1024))
+
+                if service_usage['unit'] == 'second':
+                    # convert seconds to hours, rounding up.
+                    v = Decimal(service_usage['volume'])
+                    service_usage['unit'] = 'hour'
+                    service_usage['volume'] = str(math.ceil(v /
+                                                  Decimal(60 * 60)))
+
+                # drop zero usages.
+                if not Decimal(service_usage['volume']):
+                    print('WARNING: Dropping 0-volume line: %s' %
+                          (service_usage,))
+                    continue
+
+                if Decimal(service_usage['volume']) <= 0.00001:
+                    # Precision threshold for volume.
+                    print('WARNING: Dropping near-zero-volume line: %s' %
+                          (service_usage,))
+                    continue
+
+                name = res.get('name', res.get('ip address', ''))
+                if name == '':
+                    name = res_id
+
+                if service_usage['name'] in ('n1.national-in',
+                                             'n1.national-out',
+                                             'n1.international-in',
+                                             'n1.international-out'):
+                    #print('WARNING: We will skip traffic billing for now.')
+                    traffic[service_usage['name']] += float(service_usage['volume'])
+                else:
+                    usage.append({'product': service_usage['name'],
+                                  'name': name,
+                                  'volume': float(service_usage['volume']),
+                                  'region': region})
+
+        # Aggregate traffic data
+        for traffic_type, volume in traffic.items():
+            print('Region: %s, traffic type: %s, volume: %s' %
+                  (region, traffic_type, str(volume)))
+            usage.append({'product': traffic_type,
+                          'name': TRAFFIC_MAPPING[traffic_type],
+                          'volume': math.floor(volume),
+                          'region': region})
+
+    return wash_usage(usage, start, end)
+
+
+def wash_usage(usage, start, end):
+    """Filter the usage data, dropping items we want to skip or cost-free."""
+    if not usage:
+        return
+    start = time.mktime(time.strptime(start, '%Y-%m-%dT%H:%M:%S'))
+    end = time.mktime(time.strptime(end, '%Y-%m-%dT%H:%M:%S'))
+    free_hours = (end - start) / 3600
+
+    network_hours = 0
+    router_hours = 0
+    region = 'nz_wlg_2'
+    for u in usage:
+        if u['product'] == 'n1.network':
+            network_hours += u['volume']
+
+        if u['product'] == 'n1.router':
+            router_hours += u['volume']
+            # TODO(flwang): Any region is OK for the discount for now, since
+            # we charge the same price in every region, but we may need a
+            # better approach in the future. At least one network and one
+            # router will be in the same region, so it is safe to use that
+            # region for the discount line item. In the special case where a
+            # user has two networks/routers in two different regions and
+            # neither is used for the full month, the discount line item may
+            # show up under either region, but the amount will still be
+            # correct.
+            region = u['region']
+
+    free_network_hours = (network_hours if network_hours <= free_hours
+                          else free_hours)
+    if free_network_hours:
+        usage.append({'product': 'n1.network', 'name': 'FREE NETWORK TIER',
+                      'volume': -free_network_hours, 'region': region})
+
+    free_router_hours = (router_hours if router_hours <= free_hours
+                         else free_hours)
+    if free_router_hours:
+        usage.append({'product': 'n1.router', 'name': 'FREE ROUTER TIER',
+                      'volume': -free_router_hours, 'region': region})
+
+    return usage
+
+
+def get_price(shell, pricelist, product, volume):
+    price = shell.Pricelist.price_get([pricelist], product['id'],
+                                      volume if volume >= 0
+                                      else 0)[str(pricelist)]
+
+    return price if volume >= 0 else -price
+
+
+def build_sales_order(shell, args, pricelist, usage, partner, tenant_name,
+                      tenant_id):
+    end_timestamp = datetime.datetime.strptime(args.END, '%Y-%m-%dT%H:%M:%S')
+    billing_date = str((end_timestamp - datetime.timedelta(days=1)).date())
+
+    try:
+        # Pre check: fetch all the products first.
+        for m in usage:
+            if not find_oerp_product(shell, m['region'], m['product']):
+                sys.exit(1)
+    except Exception as e:
+        # Generic exceptions carry no `.info` attribute, so print the
+        # exception itself.
+        print(e)
+        raise
+
+    log(shell.debug, 'Building sale.order')
+    try:
+        order_dict = {'partner_id': partner['id'],
+                      'pricelist_id': pricelist,
+                      'partner_invoice_id': partner['id'],
+                      'partner_shipping_id': partner['id'],
+                      'order_date': billing_date,
+                      'note': 'Tenant: %s (%s)' % (tenant_name, tenant_id),
+                      'section_id': 10,
+                      }
+        order = 'DRY_RUN_MODE'
+        print_dict(order_dict)
+
+        if not args.DRY_RUN:
+            order = shell.Order.create(order_dict)
+            shell.order_id = order
+            print('Order id: %s' % order)
+
+        # Sort by product
+        usage_dict_list = []
+        for m in sorted(usage, key=lambda m: m['product']):
+            prod = find_oerp_product(shell, m['region'], m['product'])
+
+            # TODO(flwang): 1. select the correct unit; 2. map via position
+            usage_dict = {'order_id': order,
+                          'product_id': prod['id'],
+                          'product_uom': prod['uom_id'][0],
+                          'product_uom_qty': math.fabs(m['volume']),
+                          'name': m['name'],
+                          'price_unit': get_price(shell, pricelist,
                                                  prod, m['volume'])
+                          }
+            if usage_dict['product_uom_qty'] < 0.005:
+                # Odoo rounds product_uom_qty, so a quantity this small
+                # would end up as 0 and the quoting would fail.
+                print('%s is too small.' % str(usage_dict['product_uom_qty']))
+                continue
+
+            usage_dict_list.append(usage_dict)
+
+            if not args.DRY_RUN:
+                shell.Orderline.create(usage_dict)
+
+        print_list(usage_dict_list, ['product_id', 'product_uom',
+                                     'product_uom_qty', 'name', 'price_unit'])
+    except odoorpc.error.RPCError as e:
+        exc_type, exc_value, exc_traceback = sys.exc_info()
+        traceback.print_exception(exc_type, exc_value, exc_traceback,
+                                  limit=2, file=sys.stdout)
+        print(e.info)
+        raise e
+    except Exception as e:
+        exc_type, exc_value, exc_traceback = sys.exc_info()
+        traceback.print_exception(exc_type, exc_value, exc_traceback,
+                                  limit=2, file=sys.stdout)
+        print(e)
+        raise e
+
+
+def dump_all(shell, model, fields):
+    """Debugging helper only."""
+    print('%s:' % model)
+    ids = shell.oerp.search(model, [])
+    for _id in ids:
+        obj = shell.oerp.read(model, _id)
+        print('    %s %s' % (_id, {f: obj[f] for f in fields}))
+
+
+def log(debug, msg):
+    """A tiny log method to print running details."""
+    if debug:
+        print(msg)
+
+
+def print_list(objs, fields, formatters={}):
+    pt = prettytable.PrettyTable([f for f in fields], caching=False)
+    pt.align = 'l'
+
+    for o in objs:
+        row = []
+        for field in fields:
+            if field in formatters:
+                row.append(formatters[field](o))
+            else:
+                field_name = field.lower().replace(' ', '_')
+                # dict rows are keyed by the normalised field name.
+                if isinstance(o, dict) and field_name in o:
+                    data = o[field_name]
+                else:
+                    data = getattr(o, field_name, None) or ''
+                row.append(data)
+        pt.add_row(row)
+
+    print(strutils.encodeutils.safe_encode(pt.get_string()))
+
+
+def login_odoo(shell):
+    conf = ConfigParser.ConfigParser()
+    conf.read(['glue.ini'])
+
+    shell.oerp = odoorpc.ODOO(conf.get('odoo', 'hostname'),
+                              protocol=conf.get('odoo', 'protocol'),
+                              port=conf.getint('odoo', 'port'),
+                              version=conf.get('odoo', 'version'))
+
+    shell.oerp.login(conf.get('odoo', 'database'),
+                     conf.get('odoo', 'user'),
+                     conf.get('odoo', 'password'))
+
+    shell.Order = shell.oerp.env['sale.order']
+    shell.Orderline = shell.oerp.env['sale.order.line']
+    shell.Tenant = shell.oerp.env['cloud.tenant']
+    shell.Partner = shell.oerp.env['res.partner']
+    shell.Pricelist = shell.oerp.env['product.pricelist']
+    shell.Product = shell.oerp.env['product.product']
+
+
+def check_duplicate(order):
+    """Placeholder for a future duplicate-order check."""
+    return False
+
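+# One possible implementation of the check_duplicate() stub above, assuming
+# a duplicate means another order for the same partner, order date and
+# tenant note (mirroring the keys used in build_sales_order); the `shell`
+# handle would need to be passed in by the caller:
+#
+#   def check_duplicate(shell, order):
+#       dup_ids = shell.Order.search([('partner_id', '=', order.partner_id.id),
+#                                     ('order_date', '=', order.order_date),
+#                                     ('note', '=', order.note),
+#                                     ('id', '!=', order.id)])
+#       return bool(dup_ids)
+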
+@arg('--new-status', '-s', type=str, metavar='STATUS',
+     dest='STATUS', required=True,
+     choices=['manual', 'cancel', 'draft'],
+     help='The new status of the quotation.')
+@arg('--company', '-c', type=str, metavar='COMPANY',
+     dest='COMPANY', required=False,
+     help='Company of the quotation customer to filter with.')
+@arg('--tenant-id', '-t', type=str, metavar='TENANT_ID',
+     dest='TENANT_ID', required=False,
+     help='Tenant of quotations to filter with.')
+@arg('--id', type=str, metavar='ORDER_ID',
+     dest='ORDER_ID', required=False,
+     help='Order ID to update. If it is given, COMPANY and TENANT_ID will '
+          'be ignored. NOTE: This is NOT the Quotation Number.')
+def do_update_quote(shell, args):
+    """Update the status of matching quotations."""
+    login_odoo(shell)
+
+    if args.ORDER_ID:
+        criterion = [('id', '=', args.ORDER_ID)]
+    else:
+        criterion = [('state', '=', 'draft')]
+        if args.COMPANY:
+            criterion.append(('company_id.name', 'ilike', args.COMPANY))
+        if args.TENANT_ID:
+            tenant_object = shell.keystone.tenants.get(args.TENANT_ID)
+            partner = find_oerp_partner_for_tenant(shell, tenant_object)
+            criterion.append(('partner_id', '=', partner['id']))
+
+    ids = shell.Order.search(criterion)
+    for id in ids:
+        try:
+            print('Processing order: %s' % id)
+            order = shell.Order.browse(id)
+
+            # Just a placeholder for further improvement.
+            is_dup = check_duplicate(order)
+
+            if not is_dup:
+                print("changing state: %s -> %s" % (order.state, args.STATUS))
+                # By default when updating values of a record, the change is
+                # automatically sent to the server.
+                order.state = args.STATUS
+        except odoorpc.error.RPCError as e:
+            exc_type, exc_value, exc_traceback = sys.exc_info()
+            traceback.print_exception(exc_type, exc_value, exc_traceback,
+                                      limit=2, file=sys.stdout)
+            print(e.info)
+            print('Failed to update order: %s' % id)
+        except Exception as e:
+            exc_type, exc_value, exc_traceback = sys.exc_info()
+            traceback.print_exception(exc_type, exc_value, exc_traceback,
+                                      limit=2, file=sys.stdout)
+            print(e)
+            print('Failed to update order: %s' % id)
+
+
+def print_dict(d, max_column_width=80):
+    pt = prettytable.PrettyTable(['Property', 'Value'], caching=False)
+    pt.align = 'l'
+    pt.max_width = max_column_width
+    [pt.add_row(list(r)) for r in six.iteritems(d)]
+    print(strutils.encodeutils.safe_encode(pt.get_string(sortby='Property')))
+
+
+if __name__ == '__main__':
+    try:
+        OdooShell().main(sys.argv[1:])
+    except KeyboardInterrupt:
+        print("Terminating...")
+        sys.exit(1)
+    except Exception as e:
+        exc_type, exc_value, exc_traceback = sys.exc_info()
+        traceback.print_exception(exc_type, exc_value, exc_traceback,
+                                  limit=2, file=sys.stdout)
diff --git a/odoo/odoo-products-snapshot.py b/odoo/odoo-products-snapshot.py
new file mode 100755
index 0000000..5216392
--- /dev/null
+++ b/odoo/odoo-products-snapshot.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python2
+import oerplib
+import sys
+import os
+import pprint
+import argparse
+import math
+import ConfigParser
+from decimal import Decimal
+# requires distilclient>=0.5.1
+from distilclient.client import Client as DistilClient
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--start', required=True, help='Start date')
+parser.add_argument('--end', required=True, help='End date')
+
+args = parser.parse_args()
+
+conf = ConfigParser.ConfigParser()
+conf.read(['glue.ini'])
+
+region = conf.get('openstack', 'region')
+
+oerp = oerplib.OERP(conf.get('odoo', 'hostname'),
+                    protocol=conf.get('odoo', 'protocol'),
+                    port=conf.getint('odoo', 'port'),
+                    version=conf.get('odoo',
'version')) +oerp.login(conf.get('odoo', 'user'), + conf.get('odoo', 'password'), + conf.get('odoo', 'database')) + +# debug helper +def dump_all(model, fields, conds=None): + print '%s:' % model + ids = oerp.search(model, conds or []) + objs = oerp.read(model, ids) + for obj in objs: + print ' %s %s' % (obj['id'], {f:obj[f] for f in fields}) + +pricelist_model = oerp.get('product.pricelist') +pricelist = oerp.search('product.pricelist', + [('name', '=', conf.get('odoo', 'export_pricelist'))]) + +product_category = oerp.search('product.category', + [('name', '=', conf.get('odoo', 'product_category'))]) + +product_ids = oerp.search('product.product', + [('categ_id', '=', product_category[0]), + ('sale_ok', '=', True), + ('active', '=', True)]) + +products = oerp.read('product.product', product_ids) + +prices = {} + +for p in products: + if not p['name_template'].startswith(region + '.'): + continue + base_name = p['name_template'][len(region)+1:] + # exported prices are for one unit -- do not take into account + # any bulk pricing rules. + unit_price = pricelist_model.price_get([pricelist[0]], p['id'], 1)[str(pricelist[0])] + prices[base_name] = unit_price + print '%s %s' % (base_name, unit_price) + +# create the snapshot in distil +dc = DistilClient( + os_username=os.getenv('OS_USERNAME'), + os_password=os.getenv('OS_PASSWORD'), + os_tenant_id=os.getenv('OS_TENANT_ID'), + os_auth_url=os.getenv('OS_AUTH_URL'), + os_region_name=os.getenv('OS_REGION_NAME')) + +dc.set_prices(args.start, args.end, prices) diff --git a/odoo/requirements.txt b/odoo/requirements.txt new file mode 100644 index 0000000..340e8d3 --- /dev/null +++ b/odoo/requirements.txt @@ -0,0 +1,3 @@ +argparse==1.2.1 +odoorpc==0.4.2 +wsgiref==0.1.2 diff --git a/requirements.txt b/requirements.txt index 22799e6..a4f8d6b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,3 @@ -python-novaclient>=2.17 Babel==1.3 Flask==0.10.1 Jinja2==2.7.2 @@ -18,18 +17,21 @@ iso8601==0.1.8 itsdangerous==0.23 mock==1.0.1 netaddr==0.7.10 -nose==1.3.0 -oslo.config==1.2.1 -pbr==0.6 +#nose==1.3.0 prettytable==0.7.2 psycopg2==2.5.2 pyaml==13.07.0 -python-keystoneclient==0.3.2 pytz==2013.9 requests==1.1.0 requirements-parser==0.0.6 simplejson==3.3.3 -six==1.5.2 urllib3==1.5 waitress==0.8.8 wsgiref==0.1.2 + +six>=1.7.0 +pbr>=0.6,!=0.7,<1.0 + +python-novaclient>=2.17.0 +python-cinderclient>=1.0.8 +keystonemiddleware!=4.1.0,>=4.0.0 # Apache-2.0 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..b89a914 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,33 @@ +[metadata] +name = distil +version = 2014.1 +summary = Distil project +description-file = README.md +license = Apache Software License +classifiers = + Programming Language :: Python + Programming Language :: Python :: 2 + Programming Language :: Python :: 2.7 + Environment :: OpenStack + Intended Audience :: Information Technology + Intended Audience :: System Administrators + License :: OSI Approved :: Apache Software License + Operating System :: POSIX :: Linux +author = OpenStack +author-email = openstack-dev@lists.openstack.org +home-page = http://docs.openstack.org/developer/distil/ + +[global] +setup-hooks = pbr.hooks.setup_hook + +[files] +packages = + distil + +data_files = + share/distil = etc/distil/* + +[build_sphinx] +all_files = 1 +build-dir = doc/build +source-dir = doc/source diff --git a/setup.py b/setup.py index 953c550..7363757 100644 --- a/setup.py +++ b/setup.py @@ -1,12 +1,30 @@ -from setuptools import setup +#!/usr/bin/env python +# Copyright (c) 2013 
Hewlett-Packard Development Company, L.P. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. -setup(name='distil', - version='0.1', - description='Distil, a set of APIs for creating billable items from Openstack-Ceilometer', - author='Aurynn Shaw', - author_email='aurynn@catalyst.net.nz', - contributors=["Chris Forbes", "Adrian Turjak"], - contributor_emails=["chris.forbes@catalyst.net.nz", "adriant@catalyst.net.nz"], - url='https://github.com/catalyst/distil', - packages=["distil", "distil.api", "distil.models"] - ) +# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT +import setuptools + +# In python < 2.7.4, a lazy loading of package `pbr` will break +# setuptools if some other modules registered functions in `atexit`. +# solution from: http://bugs.python.org/issue15881#msg170215 +try: + import multiprocessing # noqa +except ImportError: + pass + +setuptools.setup( + setup_requires=['pbr'], + pbr=True) diff --git a/test-requirements.txt b/test-requirements.txt new file mode 100644 index 0000000..9188675 --- /dev/null +++ b/test-requirements.txt @@ -0,0 +1,13 @@ +hacking>=0.9.2,<0.10 +coverage>=3.6 +discover +fixtures>=0.3.14 +oslosphinx +oslotest +pylint==0.25.2 +sphinx>=1.1.2,!=1.2.0,<1.3 +sphinxcontrib-httpdomain +sqlalchemy-migrate>=0.9.1 +testrepository>=0.0.18 +testscenarios>=0.4 +testtools>=0.9.34 diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index fc4b5b1..0000000 --- a/tests/__init__.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright (C) 2014 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import subprocess -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, scoped_session,create_session - -from sqlalchemy.pool import NullPool -from distil.models import Resource, Tenant, UsageEntry, SalesOrder, Base -from distil import config -from .constants import DATABASE_NAME, PG_DATABASE_URI, MY_DATABASE_URI -from .constants import config as test_config - - -def setUp(): - subprocess.call(["/usr/bin/createdb","%s" % DATABASE_NAME]) - subprocess.call(["mysql", "-u", "root","--password=password", "-e", "CREATE DATABASE %s" % DATABASE_NAME]) - mysql_engine = create_engine(MY_DATABASE_URI, poolclass=NullPool) - pg_engine = create_engine(PG_DATABASE_URI, poolclass=NullPool) - Base.metadata.create_all(bind=mysql_engine) - Base.metadata.create_all(bind=pg_engine) - - mysql_engine.dispose() - pg_engine.dispose() - - # setup test config: - config.setup_config(test_config) - - -def tearDown(): - - mysql_engine = create_engine(MY_DATABASE_URI, poolclass=NullPool) - pg_engine = create_engine(PG_DATABASE_URI, poolclass=NullPool) - - Base.metadata.drop_all(bind=mysql_engine) - Base.metadata.drop_all(bind=pg_engine) - - mysql_engine.dispose() - pg_engine.dispose() - - - subprocess.call(["/usr/bin/dropdb","%s" % DATABASE_NAME]) - subprocess.call(["mysql", "-u", "root", "--password=password", "-e", "DROP DATABASE %s" % DATABASE_NAME]) diff --git a/tests/constants.py b/tests/constants.py deleted file mode 100644 index 568eaa5..0000000 --- a/tests/constants.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright (C) 2014 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -DATABASE_NAME = "test_distil" - -PG_DATABASE_URI = "postgresql://aurynn:postgres@localhost/%s" % DATABASE_NAME -MY_DATABASE_URI = "mysql://root:password@localhost/%s" % DATABASE_NAME - - -config = { - "main": { - "region": "Wellington", - "timezone": "Pacific/Auckland", - "database_uri": PG_DATABASE_URI, - "log_file": "logs/tests.log" - }, - "rates_config": { - "file": "examples/test_rates.csv" - }, - "auth": { - "end_point": "http://localhost:35357/v2.0", - "username": "admin", - "password": "openstack", - "default_tenant": "demo", - "insecure": False, - }, - "ceilometer": { - "host": "http://localhost:8777/" - }, - "transformers": { - "uptime": { - "tracked_states": ["active", "building", - "paused", "rescued", "resized"] - }, - "from_image": { - "service": "volume.size", - "md_keys": ["image_ref", "image_meta.base_image_ref"], - "none_values": ["None", ""], - "size_keys": ["root_gb"] - } - }, - "collection": {} -} - -# from test data: -TENANT_ID = "cd3deadd3d5a4f11802d03928195f4ef" - -TENANTS = [ - {u'enabled': True, - u'description': None, - u'name': u'demo', - u'id': u'cd3deadd3d5a4f11802d03928195f4ef'} -] - -AUTH_TOKEN = "ASDFTOKEN" diff --git a/tests/test_transformers.py b/tests/test_transformers.py deleted file mode 100644 index 86912b2..0000000 --- a/tests/test_transformers.py +++ /dev/null @@ -1,352 +0,0 @@ -# Copyright (C) 2014 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import distil.transformers -from distil import constants -from distil.constants import states -import unittest -import mock -import datetime - -def p(timestr): - return datetime.datetime.strptime(timestr, constants.date_format) - -class testdata: - # string timestamps to put in meter data - t0 = p('2014-01-01T00:00:00') - t0_10 = p('2014-01-01T00:10:00') - t0_20 = p('2014-01-01T00:30:00') - t0_30 = p('2014-01-01T00:30:00') - t0_40 = p('2014-01-01T00:40:00') - t0_50 = p('2014-01-01T00:50:00') - t1 = p('2014-01-01T01:00:00') - - # and one outside the window - tpre = p('2013-12-31T23:50:00') - - flavor = '1' - flavor2 = '2' - - -class TestMeter(object): - def __init__(self, data, mtype=None): - self.data = data - self.type = mtype - - def usage(self): - return self.data - - -class UptimeTransformerTests(unittest.TestCase): - - def _run_transform(self, data): - xform = distil.transformers.Uptime() - with mock.patch('distil.helpers.flavor_name') as flavor_name: - flavor_name.side_effect = lambda x: x - return xform.transform_usage('state', data, testdata.t0, - testdata.t1) - - def test_trivial_run(self): - """ - Test that an no input data produces empty uptime. - """ - state = [] - result = self._run_transform(state) - self.assertEqual({}, result) - - def test_online_constant_flavor(self): - """ - Test that a machine online for a 1h period with constant - flavor works and gives 1h of uptime. 
- """ - state = [ - {'timestamp': testdata.t0, 'counter_volume': states['active'], - 'resource_metadata': {'flavor.id': testdata.flavor}}, - {'timestamp': testdata.t1, 'counter_volume': states['active'], - 'resource_metadata': {'flavor.id': testdata.flavor}} - ] - - result = self._run_transform(state) - # there should be one hour of usage. - self.assertEqual({testdata.flavor: 3600}, result) - - def test_offline_constant_flavor(self): - """ - Test that a machine offline for a 1h period with constant flavor - works and gives zero uptime. - """ - - state = [ - {'timestamp': testdata.t0, 'counter_volume': states['stopped'], - 'resource_metadata': {'flavor.id': testdata.flavor}}, - {'timestamp': testdata.t1, 'counter_volume': states['stopped'], - 'resource_metadata': {'flavor.id': testdata.flavor}} - ] - - result = self._run_transform(state) - # there should be no usage, the machine was off. - self.assertEqual({}, result) - - def test_shutdown_during_period(self): - """ - Test that a machine run for 0.5 then shutdown gives 0.5h uptime. - """ - state = [ - {'timestamp': testdata.t0, 'counter_volume': states['active'], - 'resource_metadata': {'flavor.id': testdata.flavor}}, - {'timestamp': testdata.t0_30, 'counter_volume': states['stopped'], - 'resource_metadata': {'flavor.id': testdata.flavor}}, - {'timestamp': testdata.t1, 'counter_volume': states['stopped'], - 'resource_metadata': {'flavor.id': testdata.flavor}} - ] - - result = self._run_transform(state) - # there should be half an hour of usage. - self.assertEqual({testdata.flavor: 1800}, result) - - def test_online_flavor_change(self): - """ - Test that a machine run for 0.5h as m1.tiny, resized to m1.large, - and run for a further 0.5 yields 0.5h of uptime in each class. - """ - state = [ - {'timestamp': testdata.t0, 'counter_volume': states['active'], - 'resource_metadata': {'flavor.id': testdata.flavor}}, - {'timestamp': testdata.t0_30, 'counter_volume': states['active'], - 'resource_metadata': {'flavor.id': testdata.flavor2}}, - {'timestamp': testdata.t1, 'counter_volume': states['active'], - 'resource_metadata': {'flavor.id': testdata.flavor2}} - ] - - result = self._run_transform(state) - # there should be half an hour of usage in each of m1.tiny and m1.large - self.assertEqual({testdata.flavor: 1800, testdata.flavor2: 1800}, - result) - - def test_period_leadin_none_available(self): - """ - Test that if the first data point is well into the window, and we had - no lead-in data, we assume no usage until our first real data point. - """ - state = [ - {'timestamp': testdata.t0_10, 'counter_volume': states['active'], - 'resource_metadata': {'flavor.id': testdata.flavor}}, - {'timestamp': testdata.t1, 'counter_volume': states['active'], - 'resource_metadata': {'flavor.id': testdata.flavor}} - ] - - result = self._run_transform(state) - # there should be 50 minutes of usage; we have no idea what happened - # before that so we don't try to bill it. - self.assertEqual({testdata.flavor: 3000}, result) - - def test_period_leadin_available(self): - """ - Test that if the first data point is well into the window, but we *do* - have lead-in data, then we use the lead-in clipped to the start of the - window. 
- """ - state = [ - {'timestamp': testdata.tpre, 'counter_volume': states['active'], - 'resource_metadata': {'flavor.id': testdata.flavor}}, - {'timestamp': testdata.t0_10, 'counter_volume': states['active'], - 'resource_metadata': {'flavor.id': testdata.flavor}}, - {'timestamp': testdata.t1, 'counter_volume': states['active'], - 'resource_metadata': {'flavor.id': testdata.flavor}} - ] - - result = self._run_transform(state) - # there should be 60 minutes of usage; we have no idea what - # happened before that so we don't try to bill it. - self.assertEqual({testdata.flavor: 3600}, result) - - -class GaugeMaxTransformerTests(unittest.TestCase): - - def test_all_different_values(self): - """ - Tests that the transformer correctly grabs the highest value, - when all values are different. - """ - - data = [ - {'timestamp': testdata.t0, 'counter_volume': 12}, - {'timestamp': testdata.t0_10, 'counter_volume': 3}, - {'timestamp': testdata.t0_20, 'counter_volume': 7}, - {'timestamp': testdata.t0_30, 'counter_volume': 3}, - {'timestamp': testdata.t0_40, 'counter_volume': 25}, - {'timestamp': testdata.t0_50, 'counter_volume': 2}, - {'timestamp': testdata.t1, 'counter_volume': 6}, - ] - - xform = distil.transformers.GaugeMax() - usage = xform.transform_usage('some_meter', data, testdata.t0, - testdata.t1) - - self.assertEqual({'some_meter': 25}, usage) - - def test_all_same_values(self): - """ - Tests that that transformer correctly grabs any value, - when all values are the same. - """ - - data = [ - {'timestamp': testdata.t0, 'counter_volume': 25}, - {'timestamp': testdata.t0_30, 'counter_volume': 25}, - {'timestamp': testdata.t1, 'counter_volume': 25}, - ] - - xform = distil.transformers.GaugeMax() - usage = xform.transform_usage('some_meter', data, testdata.t0, - testdata.t1) - - self.assertEqual({'some_meter': 25}, usage) - - -class GaugeSumTransformerTests(unittest.TestCase): - - def test_basic_sum(self): - """ - Tests that the transformer correctly calculate the sum value. - """ - - data = [ - {'timestamp': p('2014-01-01T00:00:00'), 'counter_volume': 1}, - {'timestamp': p('2014-01-01T00:10:00'), 'counter_volume': 1}, - {'timestamp': p('2014-01-01T01:00:00'), 'counter_volume': 1}, - ] - - xform = distil.transformers.GaugeSum() - usage = xform.transform_usage('fake_meter', data, testdata.t0, - testdata.t1) - - self.assertEqual({'fake_meter': 2}, usage) - - -class FromImageTransformerTests(unittest.TestCase): - """ - These tests rely on config settings for from_image, - as defined in test constants, or in conf.yaml - """ - - def test_from_volume_case(self): - """ - If instance is booted from volume transformer should return none. - """ - data = [ - {'timestamp': testdata.t0, - 'resource_metadata': {'image_ref': ""}}, - {'timestamp': testdata.t0_30, - 'resource_metadata': {'image_ref': "None"}}, - {'timestamp': testdata.t1, - 'resource_metadata': {'image_ref': "None"}} - ] - - data2 = [ - {'timestamp': testdata.t0_30, - 'resource_metadata': {'image_ref': "None"}} - ] - - xform = distil.transformers.FromImage() - usage = xform.transform_usage('instance', data, testdata.t0, - testdata.t1) - usage2 = xform.transform_usage('instance', data2, testdata.t0, - testdata.t1) - - self.assertEqual(None, usage) - self.assertEqual(None, usage2) - - def test_default_to_from_volume_case(self): - """ - Unless all image refs contain something, assume booted from volume. 
- """ - data = [ - {'timestamp': testdata.t0, - 'resource_metadata': {'image_ref': ""}}, - {'timestamp': testdata.t0_30, - 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef"}}, - {'timestamp': testdata.t1, - 'resource_metadata': {'image_ref': "None"}} - ] - - xform = distil.transformers.FromImage() - usage = xform.transform_usage('instance', data, testdata.t0, - testdata.t1) - - self.assertEqual(None, usage) - - def test_from_image_case(self): - """ - If all image refs contain something, should return entry. - """ - data = [ - {'timestamp': testdata.t0, - 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef", - 'root_gb': "20"}}, - {'timestamp': testdata.t0_30, - 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef", - 'root_gb': "20"}}, - {'timestamp': testdata.t1, - 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef", - 'root_gb': "20"}} - ] - - xform = distil.transformers.FromImage() - usage = xform.transform_usage('instance', data, testdata.t0, - testdata.t1) - - self.assertEqual({'volume.size': 20}, usage) - - def test_from_image_case_highest_size(self): - """ - If all image refs contain something, - should return entry with highest size from data. - """ - data = [ - {'timestamp': testdata.t0, - 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef", - 'root_gb': "20"}}, - {'timestamp': testdata.t0_30, - 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef", - 'root_gb': "60"}}, - {'timestamp': testdata.t1, - 'resource_metadata': {'image_ref': "d5a4f118023928195f4ef", - 'root_gb': "20"}} - ] - - xform = distil.transformers.FromImage() - usage = xform.transform_usage('instance', data, testdata.t0, - testdata.t1) - - self.assertEqual({'volume.size': 60}, usage) - - -class GaugeNetworkServiceTransformerTests(unittest.TestCase): - - def test_basic_sum(self): - """Tests that the transformer correctly calculate the sum value. - """ - - data = [ - {'timestamp': p('2014-01-01T00:00:00'), 'counter_volume': 1}, - {'timestamp': p('2014-01-01T00:10:00'), 'counter_volume': 0}, - {'timestamp': p('2014-01-01T01:00:00'), 'counter_volume': 2}, - ] - - xform = distil.transformers.GaugeNetworkService() - usage = xform.transform_usage('fake_meter', data, testdata.t0, - testdata.t1) - - self.assertEqual({'fake_meter': 1}, usage) diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..7b35c56 --- /dev/null +++ b/tox.ini @@ -0,0 +1,49 @@ +[tox] +envlist = py26,py27,py33,pep8 +minversion = 1.6 +skipsdist = True + +[testenv] +usedevelop = True +install_command = pip install -U {opts} {packages} +setenv = + VIRTUAL_ENV={envdir} + DISCOVER_DIRECTORY=distil/tests/unit +deps = + -r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt +commands = python setup.py test --slowest --testr-args="{posargs}" +whitelist_externals = bash + +[testenv:py33] +deps = -r{toxinidir}/requirements-py3.txt + -r{toxinidir}/test-requirements-py3.txt + +[testenv:cover] +commands = python setup.py testr --coverage --testr-args='{posargs}' + +[tox:jenkins] +downloadcache = ~/cache/pip + +[testenv:pep8] +commands = + flake8 {posargs} + +[testenv:venv] +commands = {posargs} + +[testenv:docs] +commands = + rm -rf doc/html doc/build + rm -rf doc/source/apidoc doc/source/api + python setup.py build_sphinx + +[testenv:pylint] +setenv = VIRTUAL_ENV={envdir} +commands = bash tools/lintstack.sh + +[flake8] +ignore = F401,H302,H305,H306,H307,H404,H405 +show-source = true +builtins = _ +exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,tools